1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use globset::Glob;
5use gpui::{executor::Deterministic, test::subscribe, AppContext};
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
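// Initialize logging for tests when the `RUST_LOG` environment variable is set.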
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 init_test(cx);
30 cx.foreground().allow_parking();
31
32 let dir = temp_tree(json!({
33 "root": {
34 "apple": "",
35 "banana": {
36 "carrot": {
37 "date": "",
38 "endive": "",
39 }
40 },
41 "fennel": {
42 "grape": "",
43 }
44 }
45 }));
46
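    // Create a symlink to the root directory, plus a symlink inside the tree that
    // points at a sibling directory.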
47 let root_link_path = dir.path().join("root_link");
48 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
49 unix::fs::symlink(
50 &dir.path().join("root/fennel"),
51 &dir.path().join("root/finnochio"),
52 )
53 .unwrap();
54
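    // Open a project through the symlinked root and verify that paths through the
    // symlinked directory resolve to the same files.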
55 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
56 project.read_with(cx, |project, cx| {
57 let tree = project.worktrees(cx).next().unwrap().read(cx);
58 assert_eq!(tree.file_count(), 5);
59 assert_eq!(
60 tree.inode_for_path("fennel/grape"),
61 tree.inode_for_path("finnochio/grape")
62 );
63 });
64}
65
66#[gpui::test]
67async fn test_managing_language_servers(
68 deterministic: Arc<Deterministic>,
69 cx: &mut gpui::TestAppContext,
70) {
71 init_test(cx);
72
73 let mut rust_language = Language::new(
74 LanguageConfig {
75 name: "Rust".into(),
76 path_suffixes: vec!["rs".to_string()],
77 ..Default::default()
78 },
79 Some(tree_sitter_rust::language()),
80 );
81 let mut json_language = Language::new(
82 LanguageConfig {
83 name: "JSON".into(),
84 path_suffixes: vec!["json".to_string()],
85 ..Default::default()
86 },
87 None,
88 );
89 let mut fake_rust_servers = rust_language
90 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
91 name: "the-rust-language-server",
92 capabilities: lsp::ServerCapabilities {
93 completion_provider: Some(lsp::CompletionOptions {
94 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
95 ..Default::default()
96 }),
97 ..Default::default()
98 },
99 ..Default::default()
100 }))
101 .await;
102 let mut fake_json_servers = json_language
103 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
104 name: "the-json-language-server",
105 capabilities: lsp::ServerCapabilities {
106 completion_provider: Some(lsp::CompletionOptions {
107 trigger_characters: Some(vec![":".to_string()]),
108 ..Default::default()
109 }),
110 ..Default::default()
111 },
112 ..Default::default()
113 }))
114 .await;
115
116 let fs = FakeFs::new(cx.background());
117 fs.insert_tree(
118 "/the-root",
119 json!({
120 "test.rs": "const A: i32 = 1;",
121 "test2.rs": "",
122 "Cargo.toml": "a = 1",
123 "package.json": "{\"a\": 1}",
124 }),
125 )
126 .await;
127
128 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
129
130 // Open a buffer without an associated language server.
131 let toml_buffer = project
132 .update(cx, |project, cx| {
133 project.open_local_buffer("/the-root/Cargo.toml", cx)
134 })
135 .await
136 .unwrap();
137
138 // Open a buffer with an associated language server before the language for it has been loaded.
139 let rust_buffer = project
140 .update(cx, |project, cx| {
141 project.open_local_buffer("/the-root/test.rs", cx)
142 })
143 .await
144 .unwrap();
145 rust_buffer.read_with(cx, |buffer, _| {
146 assert_eq!(buffer.language().map(|l| l.name()), None);
147 });
148
149 // Now we add the languages to the project, and ensure they get assigned to all
150 // the relevant open buffers.
151 project.update(cx, |project, _| {
152 project.languages.add(Arc::new(json_language));
153 project.languages.add(Arc::new(rust_language));
154 });
155 deterministic.run_until_parked();
156 rust_buffer.read_with(cx, |buffer, _| {
157 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
158 });
159
160 // A server is started up, and it is notified about Rust files.
161 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
162 assert_eq!(
163 fake_rust_server
164 .receive_notification::<lsp::notification::DidOpenTextDocument>()
165 .await
166 .text_document,
167 lsp::TextDocumentItem {
168 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
169 version: 0,
170 text: "const A: i32 = 1;".to_string(),
171 language_id: Default::default()
172 }
173 );
174
175 // The buffer is configured based on the language server's capabilities.
176 rust_buffer.read_with(cx, |buffer, _| {
177 assert_eq!(
178 buffer.completion_triggers(),
179 &[".".to_string(), "::".to_string()]
180 );
181 });
182 toml_buffer.read_with(cx, |buffer, _| {
183 assert!(buffer.completion_triggers().is_empty());
184 });
185
186 // Edit a buffer. The changes are reported to the language server.
187 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
188 assert_eq!(
189 fake_rust_server
190 .receive_notification::<lsp::notification::DidChangeTextDocument>()
191 .await
192 .text_document,
193 lsp::VersionedTextDocumentIdentifier::new(
194 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
195 1
196 )
197 );
198
199 // Open a third buffer with a different associated language server.
200 let json_buffer = project
201 .update(cx, |project, cx| {
202 project.open_local_buffer("/the-root/package.json", cx)
203 })
204 .await
205 .unwrap();
206
    // A JSON language server is started and is notified only about the JSON buffer.
208 let mut fake_json_server = fake_json_servers.next().await.unwrap();
209 assert_eq!(
210 fake_json_server
211 .receive_notification::<lsp::notification::DidOpenTextDocument>()
212 .await
213 .text_document,
214 lsp::TextDocumentItem {
215 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
216 version: 0,
217 text: "{\"a\": 1}".to_string(),
218 language_id: Default::default()
219 }
220 );
221
222 // This buffer is configured based on the second language server's
223 // capabilities.
224 json_buffer.read_with(cx, |buffer, _| {
225 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
226 });
227
228 // When opening another buffer whose language server is already running,
229 // it is also configured based on the existing language server's capabilities.
230 let rust_buffer2 = project
231 .update(cx, |project, cx| {
232 project.open_local_buffer("/the-root/test2.rs", cx)
233 })
234 .await
235 .unwrap();
236 rust_buffer2.read_with(cx, |buffer, _| {
237 assert_eq!(
238 buffer.completion_triggers(),
239 &[".".to_string(), "::".to_string()]
240 );
241 });
242
243 // Changes are reported only to servers matching the buffer's language.
244 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
245 rust_buffer2.update(cx, |buffer, cx| {
246 buffer.edit([(0..0, "let x = 1;")], None, cx)
247 });
248 assert_eq!(
249 fake_rust_server
250 .receive_notification::<lsp::notification::DidChangeTextDocument>()
251 .await
252 .text_document,
253 lsp::VersionedTextDocumentIdentifier::new(
254 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
255 1
256 )
257 );
258
259 // Save notifications are reported to all servers.
260 project
261 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
262 .await
263 .unwrap();
264 assert_eq!(
265 fake_rust_server
266 .receive_notification::<lsp::notification::DidSaveTextDocument>()
267 .await
268 .text_document,
269 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
270 );
271 assert_eq!(
272 fake_json_server
273 .receive_notification::<lsp::notification::DidSaveTextDocument>()
274 .await
275 .text_document,
276 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
277 );
278
279 // Renames are reported only to servers matching the buffer's language.
280 fs.rename(
281 Path::new("/the-root/test2.rs"),
282 Path::new("/the-root/test3.rs"),
283 Default::default(),
284 )
285 .await
286 .unwrap();
287 assert_eq!(
288 fake_rust_server
289 .receive_notification::<lsp::notification::DidCloseTextDocument>()
290 .await
291 .text_document,
292 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
293 );
294 assert_eq!(
295 fake_rust_server
296 .receive_notification::<lsp::notification::DidOpenTextDocument>()
297 .await
298 .text_document,
299 lsp::TextDocumentItem {
300 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
301 version: 0,
302 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
303 language_id: Default::default()
304 },
305 );
306
307 rust_buffer2.update(cx, |buffer, cx| {
308 buffer.update_diagnostics(
309 LanguageServerId(0),
310 DiagnosticSet::from_sorted_entries(
311 vec![DiagnosticEntry {
312 diagnostic: Default::default(),
313 range: Anchor::MIN..Anchor::MAX,
314 }],
315 &buffer.snapshot(),
316 ),
317 cx,
318 );
319 assert_eq!(
320 buffer
321 .snapshot()
322 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
323 .count(),
324 1
325 );
326 });
327
328 // When the rename changes the extension of the file, the buffer gets closed on the old
329 // language server and gets opened on the new one.
330 fs.rename(
331 Path::new("/the-root/test3.rs"),
332 Path::new("/the-root/test3.json"),
333 Default::default(),
334 )
335 .await
336 .unwrap();
337 assert_eq!(
338 fake_rust_server
339 .receive_notification::<lsp::notification::DidCloseTextDocument>()
340 .await
341 .text_document,
342 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
343 );
344 assert_eq!(
345 fake_json_server
346 .receive_notification::<lsp::notification::DidOpenTextDocument>()
347 .await
348 .text_document,
349 lsp::TextDocumentItem {
350 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
351 version: 0,
352 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
353 language_id: Default::default()
354 },
355 );
356
    // The diagnostics are cleared, since the buffer's language has changed.
358 rust_buffer2.read_with(cx, |buffer, _| {
359 assert_eq!(
360 buffer
361 .snapshot()
362 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
363 .count(),
364 0
365 );
366 });
367
    // The renamed file's version resets after changing language servers.
369 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
370 assert_eq!(
371 fake_json_server
372 .receive_notification::<lsp::notification::DidChangeTextDocument>()
373 .await
374 .text_document,
375 lsp::VersionedTextDocumentIdentifier::new(
376 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
377 1
378 )
379 );
380
381 // Restart language servers
382 project.update(cx, |project, cx| {
383 project.restart_language_servers_for_buffers(
384 vec![rust_buffer.clone(), json_buffer.clone()],
385 cx,
386 );
387 });
388
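    // Wait for both of the old servers to receive shutdown requests.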
389 let mut rust_shutdown_requests = fake_rust_server
390 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
391 let mut json_shutdown_requests = fake_json_server
392 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
393 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
394
395 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
396 let mut fake_json_server = fake_json_servers.next().await.unwrap();
397
    // Ensure the Rust document is reopened in the new Rust language server.
399 assert_eq!(
400 fake_rust_server
401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
402 .await
403 .text_document,
404 lsp::TextDocumentItem {
405 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
406 version: 0,
407 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
408 language_id: Default::default()
409 }
410 );
411
    // Ensure the JSON documents are reopened in the new JSON language server.
413 assert_set_eq!(
414 [
415 fake_json_server
416 .receive_notification::<lsp::notification::DidOpenTextDocument>()
417 .await
418 .text_document,
419 fake_json_server
420 .receive_notification::<lsp::notification::DidOpenTextDocument>()
421 .await
422 .text_document,
423 ],
424 [
425 lsp::TextDocumentItem {
426 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
427 version: 0,
428 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
429 language_id: Default::default()
430 },
431 lsp::TextDocumentItem {
432 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
433 version: 0,
434 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
435 language_id: Default::default()
436 }
437 ]
438 );
439
440 // Close notifications are reported only to servers matching the buffer's language.
441 cx.update(|_| drop(json_buffer));
442 let close_message = lsp::DidCloseTextDocumentParams {
443 text_document: lsp::TextDocumentIdentifier::new(
444 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
445 ),
446 };
447 assert_eq!(
448 fake_json_server
449 .receive_notification::<lsp::notification::DidCloseTextDocument>()
450 .await,
451 close_message,
452 );
453}
454
455#[gpui::test]
456async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
457 init_test(cx);
458
459 let mut language = Language::new(
460 LanguageConfig {
461 name: "Rust".into(),
462 path_suffixes: vec!["rs".to_string()],
463 ..Default::default()
464 },
465 Some(tree_sitter_rust::language()),
466 );
467 let mut fake_servers = language
468 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
469 name: "the-language-server",
470 ..Default::default()
471 }))
472 .await;
473
474 let fs = FakeFs::new(cx.background());
475 fs.insert_tree(
476 "/the-root",
477 json!({
478 "a.rs": "",
479 "b.rs": "",
480 }),
481 )
482 .await;
483
484 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
485 project.update(cx, |project, _| {
486 project.languages.add(Arc::new(language));
487 });
488 cx.foreground().run_until_parked();
489
490 // Start the language server by opening a buffer with a compatible file extension.
491 let _buffer = project
492 .update(cx, |project, cx| {
493 project.open_local_buffer("/the-root/a.rs", cx)
494 })
495 .await
496 .unwrap();
497
498 // Keep track of the FS events reported to the language server.
499 let fake_server = fake_servers.next().await.unwrap();
500 let file_changes = Arc::new(Mutex::new(Vec::new()));
501 fake_server
502 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
503 registrations: vec![lsp::Registration {
504 id: Default::default(),
505 method: "workspace/didChangeWatchedFiles".to_string(),
506 register_options: serde_json::to_value(
507 lsp::DidChangeWatchedFilesRegistrationOptions {
508 watchers: vec![lsp::FileSystemWatcher {
509 glob_pattern: lsp::GlobPattern::String(
510 "/the-root/*.{rs,c}".to_string(),
511 ),
512 kind: None,
513 }],
514 },
515 )
516 .ok(),
517 }],
518 })
519 .await
520 .unwrap();
521 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
522 let file_changes = file_changes.clone();
523 move |params, _| {
524 let mut file_changes = file_changes.lock();
525 file_changes.extend(params.changes);
526 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
527 }
528 });
529
530 cx.foreground().run_until_parked();
531 assert_eq!(file_changes.lock().len(), 0);
532
533 // Perform some file system mutations, two of which match the watched patterns,
534 // and one of which does not.
535 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
536 .await
537 .unwrap();
538 fs.create_file("/the-root/d.txt".as_ref(), Default::default())
539 .await
540 .unwrap();
541 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
542 .await
543 .unwrap();
544
545 // The language server receives events for the FS mutations that match its watch patterns.
546 cx.foreground().run_until_parked();
547 assert_eq!(
548 &*file_changes.lock(),
549 &[
550 lsp::FileEvent {
551 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
552 typ: lsp::FileChangeType::DELETED,
553 },
554 lsp::FileEvent {
555 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
556 typ: lsp::FileChangeType::CREATED,
557 },
558 ]
559 );
560}
561
562#[gpui::test]
563async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
564 init_test(cx);
565
566 let fs = FakeFs::new(cx.background());
567 fs.insert_tree(
568 "/dir",
569 json!({
570 "a.rs": "let a = 1;",
571 "b.rs": "let b = 2;"
572 }),
573 )
574 .await;
575
576 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
577
578 let buffer_a = project
579 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
580 .await
581 .unwrap();
582 let buffer_b = project
583 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
584 .await
585 .unwrap();
586
587 project.update(cx, |project, cx| {
588 project
589 .update_diagnostics(
590 LanguageServerId(0),
591 lsp::PublishDiagnosticsParams {
592 uri: Url::from_file_path("/dir/a.rs").unwrap(),
593 version: None,
594 diagnostics: vec![lsp::Diagnostic {
595 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
596 severity: Some(lsp::DiagnosticSeverity::ERROR),
597 message: "error 1".to_string(),
598 ..Default::default()
599 }],
600 },
601 &[],
602 cx,
603 )
604 .unwrap();
605 project
606 .update_diagnostics(
607 LanguageServerId(0),
608 lsp::PublishDiagnosticsParams {
609 uri: Url::from_file_path("/dir/b.rs").unwrap(),
610 version: None,
611 diagnostics: vec![lsp::Diagnostic {
612 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
613 severity: Some(lsp::DiagnosticSeverity::WARNING),
614 message: "error 2".to_string(),
615 ..Default::default()
616 }],
617 },
618 &[],
619 cx,
620 )
621 .unwrap();
622 });
623
624 buffer_a.read_with(cx, |buffer, _| {
625 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
626 assert_eq!(
627 chunks
628 .iter()
629 .map(|(s, d)| (s.as_str(), *d))
630 .collect::<Vec<_>>(),
631 &[
632 ("let ", None),
633 ("a", Some(DiagnosticSeverity::ERROR)),
634 (" = 1;", None),
635 ]
636 );
637 });
638 buffer_b.read_with(cx, |buffer, _| {
639 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
640 assert_eq!(
641 chunks
642 .iter()
643 .map(|(s, d)| (s.as_str(), *d))
644 .collect::<Vec<_>>(),
645 &[
646 ("let ", None),
647 ("b", Some(DiagnosticSeverity::WARNING)),
648 (" = 2;", None),
649 ]
650 );
651 });
652}
653
654#[gpui::test]
655async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
656 init_test(cx);
657
658 let fs = FakeFs::new(cx.background());
659 fs.insert_tree(
660 "/root",
661 json!({
662 "dir": {
663 "a.rs": "let a = 1;",
664 },
665 "other.rs": "let b = c;"
666 }),
667 )
668 .await;
669
670 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
671
672 let (worktree, _) = project
673 .update(cx, |project, cx| {
674 project.find_or_create_local_worktree("/root/other.rs", false, cx)
675 })
676 .await
677 .unwrap();
678 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
679
680 project.update(cx, |project, cx| {
681 project
682 .update_diagnostics(
683 LanguageServerId(0),
684 lsp::PublishDiagnosticsParams {
685 uri: Url::from_file_path("/root/other.rs").unwrap(),
686 version: None,
687 diagnostics: vec![lsp::Diagnostic {
688 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
689 severity: Some(lsp::DiagnosticSeverity::ERROR),
690 message: "unknown variable 'c'".to_string(),
691 ..Default::default()
692 }],
693 },
694 &[],
695 cx,
696 )
697 .unwrap();
698 });
699
700 let buffer = project
701 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
702 .await
703 .unwrap();
704 buffer.read_with(cx, |buffer, _| {
705 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
706 assert_eq!(
707 chunks
708 .iter()
709 .map(|(s, d)| (s.as_str(), *d))
710 .collect::<Vec<_>>(),
711 &[
712 ("let b = ", None),
713 ("c", Some(DiagnosticSeverity::ERROR)),
714 (";", None),
715 ]
716 );
717 });
718
719 project.read_with(cx, |project, cx| {
720 assert_eq!(project.diagnostic_summaries(cx).next(), None);
721 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
722 });
723}
724
725#[gpui::test]
726async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
727 init_test(cx);
728
729 let progress_token = "the-progress-token";
730 let mut language = Language::new(
731 LanguageConfig {
732 name: "Rust".into(),
733 path_suffixes: vec!["rs".to_string()],
734 ..Default::default()
735 },
736 Some(tree_sitter_rust::language()),
737 );
738 let mut fake_servers = language
739 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
740 disk_based_diagnostics_progress_token: Some(progress_token.into()),
741 disk_based_diagnostics_sources: vec!["disk".into()],
742 ..Default::default()
743 }))
744 .await;
745
746 let fs = FakeFs::new(cx.background());
747 fs.insert_tree(
748 "/dir",
749 json!({
750 "a.rs": "fn a() { A }",
751 "b.rs": "const y: i32 = 1",
752 }),
753 )
754 .await;
755
756 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
757 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
758 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
759
    // Cause the worktree to start the fake language server.
761 let _buffer = project
762 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
763 .await
764 .unwrap();
765
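    // Subscribe to project events before the fake server reports any progress.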
766 let mut events = subscribe(&project, cx);
767
768 let fake_server = fake_servers.next().await.unwrap();
769 fake_server
770 .start_progress(format!("{}/0", progress_token))
771 .await;
772 assert_eq!(
773 events.next().await.unwrap(),
774 Event::DiskBasedDiagnosticsStarted {
775 language_server_id: LanguageServerId(0),
776 }
777 );
778
779 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
780 uri: Url::from_file_path("/dir/a.rs").unwrap(),
781 version: None,
782 diagnostics: vec![lsp::Diagnostic {
783 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
784 severity: Some(lsp::DiagnosticSeverity::ERROR),
785 message: "undefined variable 'A'".to_string(),
786 ..Default::default()
787 }],
788 });
789 assert_eq!(
790 events.next().await.unwrap(),
791 Event::DiagnosticsUpdated {
792 language_server_id: LanguageServerId(0),
793 path: (worktree_id, Path::new("a.rs")).into()
794 }
795 );
796
797 fake_server.end_progress(format!("{}/0", progress_token));
798 assert_eq!(
799 events.next().await.unwrap(),
800 Event::DiskBasedDiagnosticsFinished {
801 language_server_id: LanguageServerId(0)
802 }
803 );
804
805 let buffer = project
806 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
807 .await
808 .unwrap();
809
810 buffer.read_with(cx, |buffer, _| {
811 let snapshot = buffer.snapshot();
812 let diagnostics = snapshot
813 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
814 .collect::<Vec<_>>();
815 assert_eq!(
816 diagnostics,
817 &[DiagnosticEntry {
818 range: Point::new(0, 9)..Point::new(0, 10),
819 diagnostic: Diagnostic {
820 severity: lsp::DiagnosticSeverity::ERROR,
821 message: "undefined variable 'A'".to_string(),
822 group_id: 0,
823 is_primary: true,
824 ..Default::default()
825 }
826 }]
827 )
828 });
829
830 // Ensure publishing empty diagnostics twice only results in one update event.
831 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
832 uri: Url::from_file_path("/dir/a.rs").unwrap(),
833 version: None,
834 diagnostics: Default::default(),
835 });
836 assert_eq!(
837 events.next().await.unwrap(),
838 Event::DiagnosticsUpdated {
839 language_server_id: LanguageServerId(0),
840 path: (worktree_id, Path::new("a.rs")).into()
841 }
842 );
843
844 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
845 uri: Url::from_file_path("/dir/a.rs").unwrap(),
846 version: None,
847 diagnostics: Default::default(),
848 });
849 cx.foreground().run_until_parked();
850 assert_eq!(futures::poll!(events.next()), Poll::Pending);
851}
852
853#[gpui::test]
854async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
855 init_test(cx);
856
857 let progress_token = "the-progress-token";
858 let mut language = Language::new(
859 LanguageConfig {
860 path_suffixes: vec!["rs".to_string()],
861 ..Default::default()
862 },
863 None,
864 );
865 let mut fake_servers = language
866 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
867 disk_based_diagnostics_sources: vec!["disk".into()],
868 disk_based_diagnostics_progress_token: Some(progress_token.into()),
869 ..Default::default()
870 }))
871 .await;
872
873 let fs = FakeFs::new(cx.background());
874 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
875
876 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
877 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
878
879 let buffer = project
880 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
881 .await
882 .unwrap();
883
884 // Simulate diagnostics starting to update.
885 let fake_server = fake_servers.next().await.unwrap();
886 fake_server.start_progress(progress_token).await;
887
888 // Restart the server before the diagnostics finish updating.
889 project.update(cx, |project, cx| {
890 project.restart_language_servers_for_buffers([buffer], cx);
891 });
892 let mut events = subscribe(&project, cx);
893
894 // Simulate the newly started server sending more diagnostics.
895 let fake_server = fake_servers.next().await.unwrap();
896 fake_server.start_progress(progress_token).await;
897 assert_eq!(
898 events.next().await.unwrap(),
899 Event::DiskBasedDiagnosticsStarted {
900 language_server_id: LanguageServerId(1)
901 }
902 );
903 project.read_with(cx, |project, _| {
904 assert_eq!(
905 project
906 .language_servers_running_disk_based_diagnostics()
907 .collect::<Vec<_>>(),
908 [LanguageServerId(1)]
909 );
910 });
911
912 // All diagnostics are considered done, despite the old server's diagnostic
913 // task never completing.
914 fake_server.end_progress(progress_token);
915 assert_eq!(
916 events.next().await.unwrap(),
917 Event::DiskBasedDiagnosticsFinished {
918 language_server_id: LanguageServerId(1)
919 }
920 );
921 project.read_with(cx, |project, _| {
922 assert_eq!(
923 project
924 .language_servers_running_disk_based_diagnostics()
925 .collect::<Vec<_>>(),
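            // An empty, correctly-typed array: no server is running disk-based diagnostics anymore.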
926 [LanguageServerId(0); 0]
927 );
928 });
929}
930
931#[gpui::test]
932async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
933 init_test(cx);
934
935 let mut language = Language::new(
936 LanguageConfig {
937 path_suffixes: vec!["rs".to_string()],
938 ..Default::default()
939 },
940 None,
941 );
942 let mut fake_servers = language
943 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
944 ..Default::default()
945 }))
946 .await;
947
948 let fs = FakeFs::new(cx.background());
949 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
950
951 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
952 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
953
954 let buffer = project
955 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
956 .await
957 .unwrap();
958
959 // Publish diagnostics
960 let fake_server = fake_servers.next().await.unwrap();
961 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
962 uri: Url::from_file_path("/dir/a.rs").unwrap(),
963 version: None,
964 diagnostics: vec![lsp::Diagnostic {
965 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
966 severity: Some(lsp::DiagnosticSeverity::ERROR),
967 message: "the message".to_string(),
968 ..Default::default()
969 }],
970 });
971
972 cx.foreground().run_until_parked();
973 buffer.read_with(cx, |buffer, _| {
974 assert_eq!(
975 buffer
976 .snapshot()
977 .diagnostics_in_range::<_, usize>(0..1, false)
978 .map(|entry| entry.diagnostic.message.clone())
979 .collect::<Vec<_>>(),
980 ["the message".to_string()]
981 );
982 });
983 project.read_with(cx, |project, cx| {
984 assert_eq!(
985 project.diagnostic_summary(cx),
986 DiagnosticSummary {
987 error_count: 1,
988 warning_count: 0,
989 }
990 );
991 });
992
993 project.update(cx, |project, cx| {
994 project.restart_language_servers_for_buffers([buffer.clone()], cx);
995 });
996
997 // The diagnostics are cleared.
998 cx.foreground().run_until_parked();
999 buffer.read_with(cx, |buffer, _| {
1000 assert_eq!(
1001 buffer
1002 .snapshot()
1003 .diagnostics_in_range::<_, usize>(0..1, false)
1004 .map(|entry| entry.diagnostic.message.clone())
1005 .collect::<Vec<_>>(),
1006 Vec::<String>::new(),
1007 );
1008 });
1009 project.read_with(cx, |project, cx| {
1010 assert_eq!(
1011 project.diagnostic_summary(cx),
1012 DiagnosticSummary {
1013 error_count: 0,
1014 warning_count: 0,
1015 }
1016 );
1017 });
1018}
1019
1020#[gpui::test]
1021async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1022 init_test(cx);
1023
1024 let mut language = Language::new(
1025 LanguageConfig {
1026 path_suffixes: vec!["rs".to_string()],
1027 ..Default::default()
1028 },
1029 None,
1030 );
1031 let mut fake_servers = language
1032 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1033 name: "the-lsp",
1034 ..Default::default()
1035 }))
1036 .await;
1037
1038 let fs = FakeFs::new(cx.background());
1039 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1040
1041 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1042 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1043
1044 let buffer = project
1045 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1046 .await
1047 .unwrap();
1048
1049 // Before restarting the server, report diagnostics with an unknown buffer version.
1050 let fake_server = fake_servers.next().await.unwrap();
1051 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1052 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1053 version: Some(10000),
1054 diagnostics: Vec::new(),
1055 });
1056 cx.foreground().run_until_parked();
1057
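    // Restart the server; the reopened buffer should be reported with version 0 again.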
1058 project.update(cx, |project, cx| {
1059 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1060 });
1061 let mut fake_server = fake_servers.next().await.unwrap();
1062 let notification = fake_server
1063 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1064 .await
1065 .text_document;
1066 assert_eq!(notification.version, 0);
1067}
1068
1069#[gpui::test]
1070async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1071 init_test(cx);
1072
1073 let mut rust = Language::new(
1074 LanguageConfig {
1075 name: Arc::from("Rust"),
1076 path_suffixes: vec!["rs".to_string()],
1077 ..Default::default()
1078 },
1079 None,
1080 );
1081 let mut fake_rust_servers = rust
1082 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1083 name: "rust-lsp",
1084 ..Default::default()
1085 }))
1086 .await;
1087 let mut js = Language::new(
1088 LanguageConfig {
1089 name: Arc::from("JavaScript"),
1090 path_suffixes: vec!["js".to_string()],
1091 ..Default::default()
1092 },
1093 None,
1094 );
1095 let mut fake_js_servers = js
1096 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1097 name: "js-lsp",
1098 ..Default::default()
1099 }))
1100 .await;
1101
1102 let fs = FakeFs::new(cx.background());
1103 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1104 .await;
1105
1106 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1107 project.update(cx, |project, _| {
1108 project.languages.add(Arc::new(rust));
1109 project.languages.add(Arc::new(js));
1110 });
1111
1112 let _rs_buffer = project
1113 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1114 .await
1115 .unwrap();
1116 let _js_buffer = project
1117 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1118 .await
1119 .unwrap();
1120
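    // Each language server starts up and is notified only about the buffer whose
    // language it handles.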
1121 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1122 assert_eq!(
1123 fake_rust_server_1
1124 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1125 .await
1126 .text_document
1127 .uri
1128 .as_str(),
1129 "file:///dir/a.rs"
1130 );
1131
1132 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1133 assert_eq!(
1134 fake_js_server
1135 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1136 .await
1137 .text_document
1138 .uri
1139 .as_str(),
1140 "file:///dir/b.js"
1141 );
1142
    // Disable the Rust language server, ensuring that only that server is stopped.
1144 cx.update(|cx| {
1145 cx.update_global(|settings: &mut SettingsStore, cx| {
1146 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1147 settings.languages.insert(
1148 Arc::from("Rust"),
1149 LanguageSettingsContent {
1150 enable_language_server: Some(false),
1151 ..Default::default()
1152 },
1153 );
1154 });
1155 })
1156 });
1157 fake_rust_server_1
1158 .receive_notification::<lsp::notification::Exit>()
1159 .await;
1160
1161 // Enable Rust and disable JavaScript language servers, ensuring that the
1162 // former gets started again and that the latter stops.
1163 cx.update(|cx| {
1164 cx.update_global(|settings: &mut SettingsStore, cx| {
1165 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1166 settings.languages.insert(
1167 Arc::from("Rust"),
1168 LanguageSettingsContent {
1169 enable_language_server: Some(true),
1170 ..Default::default()
1171 },
1172 );
1173 settings.languages.insert(
1174 Arc::from("JavaScript"),
1175 LanguageSettingsContent {
1176 enable_language_server: Some(false),
1177 ..Default::default()
1178 },
1179 );
1180 });
1181 })
1182 });
1183 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1184 assert_eq!(
1185 fake_rust_server_2
1186 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1187 .await
1188 .text_document
1189 .uri
1190 .as_str(),
1191 "file:///dir/a.rs"
1192 );
1193 fake_js_server
1194 .receive_notification::<lsp::notification::Exit>()
1195 .await;
1196}
1197
1198#[gpui::test(iterations = 3)]
1199async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1200 init_test(cx);
1201
1202 let mut language = Language::new(
1203 LanguageConfig {
1204 name: "Rust".into(),
1205 path_suffixes: vec!["rs".to_string()],
1206 ..Default::default()
1207 },
1208 Some(tree_sitter_rust::language()),
1209 );
1210 let mut fake_servers = language
1211 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1212 disk_based_diagnostics_sources: vec!["disk".into()],
1213 ..Default::default()
1214 }))
1215 .await;
1216
1217 let text = "
1218 fn a() { A }
1219 fn b() { BB }
1220 fn c() { CCC }
1221 "
1222 .unindent();
1223
1224 let fs = FakeFs::new(cx.background());
1225 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1226
1227 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1228 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1229
1230 let buffer = project
1231 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1232 .await
1233 .unwrap();
1234
1235 let mut fake_server = fake_servers.next().await.unwrap();
1236 let open_notification = fake_server
1237 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1238 .await;
1239
1240 // Edit the buffer, moving the content down
1241 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1242 let change_notification_1 = fake_server
1243 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1244 .await;
1245 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1246
1247 // Report some diagnostics for the initial version of the buffer
1248 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1249 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1250 version: Some(open_notification.text_document.version),
1251 diagnostics: vec![
1252 lsp::Diagnostic {
1253 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1254 severity: Some(DiagnosticSeverity::ERROR),
1255 message: "undefined variable 'A'".to_string(),
1256 source: Some("disk".to_string()),
1257 ..Default::default()
1258 },
1259 lsp::Diagnostic {
1260 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1261 severity: Some(DiagnosticSeverity::ERROR),
1262 message: "undefined variable 'BB'".to_string(),
1263 source: Some("disk".to_string()),
1264 ..Default::default()
1265 },
1266 lsp::Diagnostic {
1267 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1268 severity: Some(DiagnosticSeverity::ERROR),
1269 source: Some("disk".to_string()),
1270 message: "undefined variable 'CCC'".to_string(),
1271 ..Default::default()
1272 },
1273 ],
1274 });
1275
1276 // The diagnostics have moved down since they were created.
1277 buffer.next_notification(cx).await;
1278 cx.foreground().run_until_parked();
1279 buffer.read_with(cx, |buffer, _| {
1280 assert_eq!(
1281 buffer
1282 .snapshot()
1283 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1284 .collect::<Vec<_>>(),
1285 &[
1286 DiagnosticEntry {
1287 range: Point::new(3, 9)..Point::new(3, 11),
1288 diagnostic: Diagnostic {
1289 source: Some("disk".into()),
1290 severity: DiagnosticSeverity::ERROR,
1291 message: "undefined variable 'BB'".to_string(),
1292 is_disk_based: true,
1293 group_id: 1,
1294 is_primary: true,
1295 ..Default::default()
1296 },
1297 },
1298 DiagnosticEntry {
1299 range: Point::new(4, 9)..Point::new(4, 12),
1300 diagnostic: Diagnostic {
1301 source: Some("disk".into()),
1302 severity: DiagnosticSeverity::ERROR,
1303 message: "undefined variable 'CCC'".to_string(),
1304 is_disk_based: true,
1305 group_id: 2,
1306 is_primary: true,
1307 ..Default::default()
1308 }
1309 }
1310 ]
1311 );
1312 assert_eq!(
1313 chunks_with_diagnostics(buffer, 0..buffer.len()),
1314 [
1315 ("\n\nfn a() { ".to_string(), None),
1316 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1317 (" }\nfn b() { ".to_string(), None),
1318 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1319 (" }\nfn c() { ".to_string(), None),
1320 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1321 (" }\n".to_string(), None),
1322 ]
1323 );
1324 assert_eq!(
1325 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1326 [
1327 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1328 (" }\nfn c() { ".to_string(), None),
1329 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1330 ]
1331 );
1332 });
1333
1334 // Ensure overlapping diagnostics are highlighted correctly.
1335 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1336 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1337 version: Some(open_notification.text_document.version),
1338 diagnostics: vec![
1339 lsp::Diagnostic {
1340 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1341 severity: Some(DiagnosticSeverity::ERROR),
1342 message: "undefined variable 'A'".to_string(),
1343 source: Some("disk".to_string()),
1344 ..Default::default()
1345 },
1346 lsp::Diagnostic {
1347 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1348 severity: Some(DiagnosticSeverity::WARNING),
1349 message: "unreachable statement".to_string(),
1350 source: Some("disk".to_string()),
1351 ..Default::default()
1352 },
1353 ],
1354 });
1355
1356 buffer.next_notification(cx).await;
1357 cx.foreground().run_until_parked();
1358 buffer.read_with(cx, |buffer, _| {
1359 assert_eq!(
1360 buffer
1361 .snapshot()
1362 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1363 .collect::<Vec<_>>(),
1364 &[
1365 DiagnosticEntry {
1366 range: Point::new(2, 9)..Point::new(2, 12),
1367 diagnostic: Diagnostic {
1368 source: Some("disk".into()),
1369 severity: DiagnosticSeverity::WARNING,
1370 message: "unreachable statement".to_string(),
1371 is_disk_based: true,
1372 group_id: 4,
1373 is_primary: true,
1374 ..Default::default()
1375 }
1376 },
1377 DiagnosticEntry {
1378 range: Point::new(2, 9)..Point::new(2, 10),
1379 diagnostic: Diagnostic {
1380 source: Some("disk".into()),
1381 severity: DiagnosticSeverity::ERROR,
1382 message: "undefined variable 'A'".to_string(),
1383 is_disk_based: true,
1384 group_id: 3,
1385 is_primary: true,
1386 ..Default::default()
1387 },
1388 }
1389 ]
1390 );
1391 assert_eq!(
1392 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1393 [
1394 ("fn a() { ".to_string(), None),
1395 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1396 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1397 ("\n".to_string(), None),
1398 ]
1399 );
1400 assert_eq!(
1401 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1402 [
1403 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1404 ("\n".to_string(), None),
1405 ]
1406 );
1407 });
1408
1409 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1410 // changes since the last save.
1411 buffer.update(cx, |buffer, cx| {
1412 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1413 buffer.edit(
1414 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1415 None,
1416 cx,
1417 );
1418 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1419 });
1420 let change_notification_2 = fake_server
1421 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1422 .await;
1423 assert!(
1424 change_notification_2.text_document.version > change_notification_1.text_document.version
1425 );
1426
    // Handle diagnostics that are reported out of positional order.
1428 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1429 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1430 version: Some(change_notification_2.text_document.version),
1431 diagnostics: vec![
1432 lsp::Diagnostic {
1433 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1434 severity: Some(DiagnosticSeverity::ERROR),
1435 message: "undefined variable 'BB'".to_string(),
1436 source: Some("disk".to_string()),
1437 ..Default::default()
1438 },
1439 lsp::Diagnostic {
1440 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1441 severity: Some(DiagnosticSeverity::WARNING),
1442 message: "undefined variable 'A'".to_string(),
1443 source: Some("disk".to_string()),
1444 ..Default::default()
1445 },
1446 ],
1447 });
1448
1449 buffer.next_notification(cx).await;
1450 cx.foreground().run_until_parked();
1451 buffer.read_with(cx, |buffer, _| {
1452 assert_eq!(
1453 buffer
1454 .snapshot()
1455 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1456 .collect::<Vec<_>>(),
1457 &[
1458 DiagnosticEntry {
1459 range: Point::new(2, 21)..Point::new(2, 22),
1460 diagnostic: Diagnostic {
1461 source: Some("disk".into()),
1462 severity: DiagnosticSeverity::WARNING,
1463 message: "undefined variable 'A'".to_string(),
1464 is_disk_based: true,
1465 group_id: 6,
1466 is_primary: true,
1467 ..Default::default()
1468 }
1469 },
1470 DiagnosticEntry {
1471 range: Point::new(3, 9)..Point::new(3, 14),
1472 diagnostic: Diagnostic {
1473 source: Some("disk".into()),
1474 severity: DiagnosticSeverity::ERROR,
1475 message: "undefined variable 'BB'".to_string(),
1476 is_disk_based: true,
1477 group_id: 5,
1478 is_primary: true,
1479 ..Default::default()
1480 },
1481 }
1482 ]
1483 );
1484 });
1485}
1486
1487#[gpui::test]
1488async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1489 init_test(cx);
1490
1491 let text = concat!(
1492 "let one = ;\n", //
1493 "let two = \n",
1494 "let three = 3;\n",
1495 );
1496
1497 let fs = FakeFs::new(cx.background());
1498 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1499
1500 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1501 let buffer = project
1502 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1503 .await
1504 .unwrap();
1505
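    // Insert two diagnostics with empty ranges: one within a line and one at the
    // end of a line.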
1506 project.update(cx, |project, cx| {
1507 project
1508 .update_buffer_diagnostics(
1509 &buffer,
1510 LanguageServerId(0),
1511 None,
1512 vec![
1513 DiagnosticEntry {
1514 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1515 diagnostic: Diagnostic {
1516 severity: DiagnosticSeverity::ERROR,
1517 message: "syntax error 1".to_string(),
1518 ..Default::default()
1519 },
1520 },
1521 DiagnosticEntry {
1522 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1523 diagnostic: Diagnostic {
1524 severity: DiagnosticSeverity::ERROR,
1525 message: "syntax error 2".to_string(),
1526 ..Default::default()
1527 },
1528 },
1529 ],
1530 cx,
1531 )
1532 .unwrap();
1533 });
1534
1535 // An empty range is extended forward to include the following character.
1536 // At the end of a line, an empty range is extended backward to include
1537 // the preceding character.
1538 buffer.read_with(cx, |buffer, _| {
1539 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1540 assert_eq!(
1541 chunks
1542 .iter()
1543 .map(|(s, d)| (s.as_str(), *d))
1544 .collect::<Vec<_>>(),
1545 &[
1546 ("let one = ", None),
1547 (";", Some(DiagnosticSeverity::ERROR)),
1548 ("\nlet two =", None),
1549 (" ", Some(DiagnosticSeverity::ERROR)),
1550 ("\nlet three = 3;\n", None)
1551 ]
1552 );
1553 });
1554}
1555
1556#[gpui::test]
1557async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1558 init_test(cx);
1559
1560 let fs = FakeFs::new(cx.background());
1561 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1562 .await;
1563
1564 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1565
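    // Report an error for the same file from two different language servers.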
1566 project.update(cx, |project, cx| {
1567 project
1568 .update_diagnostic_entries(
1569 LanguageServerId(0),
1570 Path::new("/dir/a.rs").to_owned(),
1571 None,
1572 vec![DiagnosticEntry {
1573 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1574 diagnostic: Diagnostic {
1575 severity: DiagnosticSeverity::ERROR,
1576 is_primary: true,
1577 message: "syntax error a1".to_string(),
1578 ..Default::default()
1579 },
1580 }],
1581 cx,
1582 )
1583 .unwrap();
1584 project
1585 .update_diagnostic_entries(
1586 LanguageServerId(1),
1587 Path::new("/dir/a.rs").to_owned(),
1588 None,
1589 vec![DiagnosticEntry {
1590 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1591 diagnostic: Diagnostic {
1592 severity: DiagnosticSeverity::ERROR,
1593 is_primary: true,
1594 message: "syntax error b1".to_string(),
1595 ..Default::default()
1596 },
1597 }],
1598 cx,
1599 )
1600 .unwrap();
1601
1602 assert_eq!(
1603 project.diagnostic_summary(cx),
1604 DiagnosticSummary {
1605 error_count: 2,
1606 warning_count: 0,
1607 }
1608 );
1609 });
1610}
1611
1612#[gpui::test]
1613async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1614 init_test(cx);
1615
1616 let mut language = Language::new(
1617 LanguageConfig {
1618 name: "Rust".into(),
1619 path_suffixes: vec!["rs".to_string()],
1620 ..Default::default()
1621 },
1622 Some(tree_sitter_rust::language()),
1623 );
1624 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1625
1626 let text = "
1627 fn a() {
1628 f1();
1629 }
1630 fn b() {
1631 f2();
1632 }
1633 fn c() {
1634 f3();
1635 }
1636 "
1637 .unindent();
1638
1639 let fs = FakeFs::new(cx.background());
1640 fs.insert_tree(
1641 "/dir",
1642 json!({
1643 "a.rs": text.clone(),
1644 }),
1645 )
1646 .await;
1647
1648 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1649 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1650 let buffer = project
1651 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1652 .await
1653 .unwrap();
1654
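    // Capture the document version that was reported when the buffer was opened.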
1655 let mut fake_server = fake_servers.next().await.unwrap();
1656 let lsp_document_version = fake_server
1657 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1658 .await
1659 .text_document
1660 .version;
1661
1662 // Simulate editing the buffer after the language server computes some edits.
1663 buffer.update(cx, |buffer, cx| {
1664 buffer.edit(
1665 [(
1666 Point::new(0, 0)..Point::new(0, 0),
1667 "// above first function\n",
1668 )],
1669 None,
1670 cx,
1671 );
1672 buffer.edit(
1673 [(
1674 Point::new(2, 0)..Point::new(2, 0),
1675 " // inside first function\n",
1676 )],
1677 None,
1678 cx,
1679 );
1680 buffer.edit(
1681 [(
1682 Point::new(6, 4)..Point::new(6, 4),
1683 "// inside second function ",
1684 )],
1685 None,
1686 cx,
1687 );
1688
1689 assert_eq!(
1690 buffer.text(),
1691 "
1692 // above first function
1693 fn a() {
1694 // inside first function
1695 f1();
1696 }
1697 fn b() {
1698 // inside second function f2();
1699 }
1700 fn c() {
1701 f3();
1702 }
1703 "
1704 .unindent()
1705 );
1706 });
1707
1708 let edits = project
1709 .update(cx, |project, cx| {
1710 project.edits_from_lsp(
1711 &buffer,
1712 vec![
1713 // replace body of first function
1714 lsp::TextEdit {
1715 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1716 new_text: "
1717 fn a() {
1718 f10();
1719 }
1720 "
1721 .unindent(),
1722 },
1723 // edit inside second function
1724 lsp::TextEdit {
1725 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1726 new_text: "00".into(),
1727 },
1728 // edit inside third function via two distinct edits
1729 lsp::TextEdit {
1730 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1731 new_text: "4000".into(),
1732 },
1733 lsp::TextEdit {
1734 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1735 new_text: "".into(),
1736 },
1737 ],
1738 LanguageServerId(0),
1739 Some(lsp_document_version),
1740 cx,
1741 )
1742 })
1743 .await
1744 .unwrap();
1745
1746 buffer.update(cx, |buffer, cx| {
1747 for (range, new_text) in edits {
1748 buffer.edit([(range, new_text)], None, cx);
1749 }
1750 assert_eq!(
1751 buffer.text(),
1752 "
1753 // above first function
1754 fn a() {
1755 // inside first function
1756 f10();
1757 }
1758 fn b() {
1759 // inside second function f200();
1760 }
1761 fn c() {
1762 f4000();
1763 }
1764 "
1765 .unindent()
1766 );
1767 });
1768}
1769
1770#[gpui::test]
1771async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1772 init_test(cx);
1773
1774 let text = "
1775 use a::b;
1776 use a::c;
1777
1778 fn f() {
1779 b();
1780 c();
1781 }
1782 "
1783 .unindent();
1784
1785 let fs = FakeFs::new(cx.background());
1786 fs.insert_tree(
1787 "/dir",
1788 json!({
1789 "a.rs": text.clone(),
1790 }),
1791 )
1792 .await;
1793
1794 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1795 let buffer = project
1796 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1797 .await
1798 .unwrap();
1799
1800 // Simulate the language server sending us a small edit in the form of a very large diff.
1801 // Rust-analyzer does this when performing a merge-imports code action.
1802 let edits = project
1803 .update(cx, |project, cx| {
1804 project.edits_from_lsp(
1805 &buffer,
1806 [
1807 // Replace the first use statement without editing the semicolon.
1808 lsp::TextEdit {
1809 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1810 new_text: "a::{b, c}".into(),
1811 },
1812 // Reinsert the remainder of the file between the semicolon and the final
1813 // newline of the file.
1814 lsp::TextEdit {
1815 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1816 new_text: "\n\n".into(),
1817 },
1818 lsp::TextEdit {
1819 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1820 new_text: "
1821 fn f() {
1822 b();
1823 c();
1824 }"
1825 .unindent(),
1826 },
1827 // Delete everything after the first newline of the file.
1828 lsp::TextEdit {
1829 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1830 new_text: "".into(),
1831 },
1832 ],
1833 LanguageServerId(0),
1834 None,
1835 cx,
1836 )
1837 })
1838 .await
1839 .unwrap();
1840
1841 buffer.update(cx, |buffer, cx| {
1842 let edits = edits
1843 .into_iter()
1844 .map(|(range, text)| {
1845 (
1846 range.start.to_point(buffer)..range.end.to_point(buffer),
1847 text,
1848 )
1849 })
1850 .collect::<Vec<_>>();
1851
1852 assert_eq!(
1853 edits,
1854 [
1855 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1856 (Point::new(1, 0)..Point::new(2, 0), "".into())
1857 ]
1858 );
1859
1860 for (range, new_text) in edits {
1861 buffer.edit([(range, new_text)], None, cx);
1862 }
1863 assert_eq!(
1864 buffer.text(),
1865 "
1866 use a::{b, c};
1867
1868 fn f() {
1869 b();
1870 c();
1871 }
1872 "
1873 .unindent()
1874 );
1875 });
1876}
1877
1878#[gpui::test]
1879async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1880 init_test(cx);
1881
1882 let text = "
1883 use a::b;
1884 use a::c;
1885
1886 fn f() {
1887 b();
1888 c();
1889 }
1890 "
1891 .unindent();
1892
1893 let fs = FakeFs::new(cx.background());
1894 fs.insert_tree(
1895 "/dir",
1896 json!({
1897 "a.rs": text.clone(),
1898 }),
1899 )
1900 .await;
1901
1902 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1903 let buffer = project
1904 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1905 .await
1906 .unwrap();
1907
    // Simulate the language server sending edits out of order, with ranges that are
    // sometimes inverted or point beyond the end of the buffer.
1910 let edits = project
1911 .update(cx, |project, cx| {
1912 project.edits_from_lsp(
1913 &buffer,
1914 [
1915 lsp::TextEdit {
1916 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1917 new_text: "\n\n".into(),
1918 },
1919 lsp::TextEdit {
1920 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1921 new_text: "a::{b, c}".into(),
1922 },
1923 lsp::TextEdit {
1924 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1925 new_text: "".into(),
1926 },
1927 lsp::TextEdit {
1928 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1929 new_text: "
1930 fn f() {
1931 b();
1932 c();
1933 }"
1934 .unindent(),
1935 },
1936 ],
1937 LanguageServerId(0),
1938 None,
1939 cx,
1940 )
1941 })
1942 .await
1943 .unwrap();
1944
1945 buffer.update(cx, |buffer, cx| {
1946 let edits = edits
1947 .into_iter()
1948 .map(|(range, text)| {
1949 (
1950 range.start.to_point(buffer)..range.end.to_point(buffer),
1951 text,
1952 )
1953 })
1954 .collect::<Vec<_>>();
1955
1956 assert_eq!(
1957 edits,
1958 [
1959 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1960 (Point::new(1, 0)..Point::new(2, 0), "".into())
1961 ]
1962 );
1963
1964 for (range, new_text) in edits {
1965 buffer.edit([(range, new_text)], None, cx);
1966 }
1967 assert_eq!(
1968 buffer.text(),
1969 "
1970 use a::{b, c};
1971
1972 fn f() {
1973 b();
1974 c();
1975 }
1976 "
1977 .unindent()
1978 );
1979 });
1980}
1981
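// Collects the text of `buffer` in `range` as `(text, severity)` pairs, merging
// adjacent chunks that share the same diagnostic severity, e.g.
// `[("let ", None), ("a", Some(DiagnosticSeverity::ERROR)), (" = 1;", None)]`.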
1982fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1983 buffer: &Buffer,
1984 range: Range<T>,
1985) -> Vec<(String, Option<DiagnosticSeverity>)> {
1986 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1987 for chunk in buffer.snapshot().chunks(range, true) {
1988 if chunks.last().map_or(false, |prev_chunk| {
1989 prev_chunk.1 == chunk.diagnostic_severity
1990 }) {
1991 chunks.last_mut().unwrap().0.push_str(chunk.text);
1992 } else {
1993 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1994 }
1995 }
1996 chunks
1997}
1998
1999#[gpui::test(iterations = 10)]
2000async fn test_definition(cx: &mut gpui::TestAppContext) {
2001 init_test(cx);
2002
2003 let mut language = Language::new(
2004 LanguageConfig {
2005 name: "Rust".into(),
2006 path_suffixes: vec!["rs".to_string()],
2007 ..Default::default()
2008 },
2009 Some(tree_sitter_rust::language()),
2010 );
2011 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2012
2013 let fs = FakeFs::new(cx.background());
2014 fs.insert_tree(
2015 "/dir",
2016 json!({
2017 "a.rs": "const fn a() { A }",
2018 "b.rs": "const y: i32 = crate::a()",
2019 }),
2020 )
2021 .await;
2022
2023 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2024 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2025
2026 let buffer = project
2027 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2028 .await
2029 .unwrap();
2030
2031 let fake_server = fake_servers.next().await.unwrap();
2032 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2033 let params = params.text_document_position_params;
2034 assert_eq!(
2035 params.text_document.uri.to_file_path().unwrap(),
2036 Path::new("/dir/b.rs"),
2037 );
2038 assert_eq!(params.position, lsp::Position::new(0, 22));
2039
2040 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2041 lsp::Location::new(
2042 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2043 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2044 ),
2045 )))
2046 });
2047
2048 let mut definitions = project
2049 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2050 .await
2051 .unwrap();
2052
    // Assert that no new language server was started.
2054 cx.foreground().run_until_parked();
2055 assert!(fake_servers.try_next().is_err());
2056
2057 assert_eq!(definitions.len(), 1);
2058 let definition = definitions.pop().unwrap();
2059 cx.update(|cx| {
2060 let target_buffer = definition.target.buffer.read(cx);
2061 assert_eq!(
2062 target_buffer
2063 .file()
2064 .unwrap()
2065 .as_local()
2066 .unwrap()
2067 .abs_path(cx),
2068 Path::new("/dir/a.rs"),
2069 );
2070 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2071 assert_eq!(
2072 list_worktrees(&project, cx),
2073 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2074 );
2075
2076 drop(definition);
2077 });
2078 cx.read(|cx| {
2079 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2080 });
2081
2082 fn list_worktrees<'a>(
2083 project: &'a ModelHandle<Project>,
2084 cx: &'a AppContext,
2085 ) -> Vec<(&'a Path, bool)> {
2086 project
2087 .read(cx)
2088 .worktrees(cx)
2089 .map(|worktree| {
2090 let worktree = worktree.read(cx);
2091 (
2092 worktree.as_local().unwrap().abs_path().as_ref(),
2093 worktree.is_visible(),
2094 )
2095 })
2096 .collect::<Vec<_>>()
2097 }
2098}
2099
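// When the server returns completions without explicit edit ranges, the range
// to replace should be inferred from the text surrounding the cursor.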
2100#[gpui::test]
2101async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2102 init_test(cx);
2103
2104 let mut language = Language::new(
2105 LanguageConfig {
2106 name: "TypeScript".into(),
2107 path_suffixes: vec!["ts".to_string()],
2108 ..Default::default()
2109 },
2110 Some(tree_sitter_typescript::language_typescript()),
2111 );
2112 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2113
2114 let fs = FakeFs::new(cx.background());
2115 fs.insert_tree(
2116 "/dir",
2117 json!({
2118 "a.ts": "",
2119 }),
2120 )
2121 .await;
2122
2123 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2124 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2125 let buffer = project
2126 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2127 .await
2128 .unwrap();
2129
2130 let fake_server = fake_language_servers.next().await.unwrap();
2131
2132 let text = "let a = b.fqn";
2133 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2134 let completions = project.update(cx, |project, cx| {
2135 project.completions(&buffer, text.len(), cx)
2136 });
2137
2138 fake_server
2139 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2140 Ok(Some(lsp::CompletionResponse::Array(vec![
2141 lsp::CompletionItem {
2142 label: "fullyQualifiedName?".into(),
2143 insert_text: Some("fullyQualifiedName".into()),
2144 ..Default::default()
2145 },
2146 ])))
2147 })
2148 .next()
2149 .await;
2150 let completions = completions.await.unwrap();
2151 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2152 assert_eq!(completions.len(), 1);
2153 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2154 assert_eq!(
2155 completions[0].old_range.to_offset(&snapshot),
2156 text.len() - 3..text.len()
2157 );
2158
2159 let text = "let a = \"atoms/cmp\"";
2160 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2161 let completions = project.update(cx, |project, cx| {
2162 project.completions(&buffer, text.len() - 1, cx)
2163 });
2164
2165 fake_server
2166 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2167 Ok(Some(lsp::CompletionResponse::Array(vec![
2168 lsp::CompletionItem {
2169 label: "component".into(),
2170 ..Default::default()
2171 },
2172 ])))
2173 })
2174 .next()
2175 .await;
2176 let completions = completions.await.unwrap();
2177 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2178 assert_eq!(completions.len(), 1);
2179 assert_eq!(completions[0].new_text, "component");
2180 assert_eq!(
2181 completions[0].old_range.to_offset(&snapshot),
2182 text.len() - 4..text.len() - 1
2183 );
2184}
2185
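// Carriage returns in a completion's insert text should be normalized to newlines.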
2186#[gpui::test]
2187async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2188 init_test(cx);
2189
2190 let mut language = Language::new(
2191 LanguageConfig {
2192 name: "TypeScript".into(),
2193 path_suffixes: vec!["ts".to_string()],
2194 ..Default::default()
2195 },
2196 Some(tree_sitter_typescript::language_typescript()),
2197 );
2198 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2199
2200 let fs = FakeFs::new(cx.background());
2201 fs.insert_tree(
2202 "/dir",
2203 json!({
2204 "a.ts": "",
2205 }),
2206 )
2207 .await;
2208
2209 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2210 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2211 let buffer = project
2212 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2213 .await
2214 .unwrap();
2215
2216 let fake_server = fake_language_servers.next().await.unwrap();
2217
2218 let text = "let a = b.fqn";
2219 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2220 let completions = project.update(cx, |project, cx| {
2221 project.completions(&buffer, text.len(), cx)
2222 });
2223
2224 fake_server
2225 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2226 Ok(Some(lsp::CompletionResponse::Array(vec![
2227 lsp::CompletionItem {
2228 label: "fullyQualifiedName?".into(),
2229 insert_text: Some("fully\rQualified\r\nName".into()),
2230 ..Default::default()
2231 },
2232 ])))
2233 })
2234 .next()
2235 .await;
2236 let completions = completions.await.unwrap();
2237 assert_eq!(completions.len(), 1);
2238 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2239}
2240
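// Applying a code action that resolves to a command (rather than edits) should
// execute the command and capture the edits the server applies while it runs.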
2241#[gpui::test(iterations = 10)]
2242async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2243 init_test(cx);
2244
2245 let mut language = Language::new(
2246 LanguageConfig {
2247 name: "TypeScript".into(),
2248 path_suffixes: vec!["ts".to_string()],
2249 ..Default::default()
2250 },
2251 None,
2252 );
2253 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2254
2255 let fs = FakeFs::new(cx.background());
2256 fs.insert_tree(
2257 "/dir",
2258 json!({
2259 "a.ts": "a",
2260 }),
2261 )
2262 .await;
2263
2264 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2265 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2266 let buffer = project
2267 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2268 .await
2269 .unwrap();
2270
2271 let fake_server = fake_language_servers.next().await.unwrap();
2272
    // The language server returns code actions that contain commands, but no edits.
2274 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2275 fake_server
2276 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2277 Ok(Some(vec![
2278 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2279 title: "The code action".into(),
2280 command: Some(lsp::Command {
2281 title: "The command".into(),
2282 command: "_the/command".into(),
2283 arguments: Some(vec![json!("the-argument")]),
2284 }),
2285 ..Default::default()
2286 }),
2287 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2288 title: "two".into(),
2289 ..Default::default()
2290 }),
2291 ]))
2292 })
2293 .next()
2294 .await;
2295
2296 let action = actions.await.unwrap()[0].clone();
2297 let apply = project.update(cx, |project, cx| {
2298 project.apply_code_action(buffer.clone(), action, true, cx)
2299 });
2300
    // Resolving the code action does not populate its edits. In the absence
    // of edits, we must execute the given command.
2303 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2304 |action, _| async move { Ok(action) },
2305 );
2306
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2309 fake_server
2310 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2311 let fake = fake_server.clone();
2312 move |params, _| {
2313 assert_eq!(params.command, "_the/command");
2314 let fake = fake.clone();
2315 async move {
2316 fake.server
2317 .request::<lsp::request::ApplyWorkspaceEdit>(
2318 lsp::ApplyWorkspaceEditParams {
2319 label: None,
2320 edit: lsp::WorkspaceEdit {
2321 changes: Some(
2322 [(
2323 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2324 vec![lsp::TextEdit {
2325 range: lsp::Range::new(
2326 lsp::Position::new(0, 0),
2327 lsp::Position::new(0, 0),
2328 ),
2329 new_text: "X".into(),
2330 }],
2331 )]
2332 .into_iter()
2333 .collect(),
2334 ),
2335 ..Default::default()
2336 },
2337 },
2338 )
2339 .await
2340 .unwrap();
2341 Ok(Some(json!(null)))
2342 }
2343 }
2344 })
2345 .next()
2346 .await;
2347
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2350 let transaction = apply.await.unwrap();
2351 assert!(transaction.0.contains_key(&buffer));
2352 buffer.update(cx, |buffer, cx| {
2353 assert_eq!(buffer.text(), "Xa");
2354 buffer.undo(cx);
2355 assert_eq!(buffer.text(), "a");
2356 });
2357}
2358
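// Saving a buffer should write its current contents to the file on disk.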
2359#[gpui::test(iterations = 10)]
2360async fn test_save_file(cx: &mut gpui::TestAppContext) {
2361 init_test(cx);
2362
2363 let fs = FakeFs::new(cx.background());
2364 fs.insert_tree(
2365 "/dir",
2366 json!({
2367 "file1": "the old contents",
2368 }),
2369 )
2370 .await;
2371
2372 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2373 let buffer = project
2374 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2375 .await
2376 .unwrap();
2377 buffer.update(cx, |buffer, cx| {
2378 assert_eq!(buffer.text(), "the old contents");
2379 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2380 });
2381
2382 project
2383 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2384 .await
2385 .unwrap();
2386
2387 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2388 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2389}
2390
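// Saving should also work when the worktree's root is the file itself.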
2391#[gpui::test]
2392async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2393 init_test(cx);
2394
2395 let fs = FakeFs::new(cx.background());
2396 fs.insert_tree(
2397 "/dir",
2398 json!({
2399 "file1": "the old contents",
2400 }),
2401 )
2402 .await;
2403
2404 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2405 let buffer = project
2406 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2407 .await
2408 .unwrap();
2409 buffer.update(cx, |buffer, cx| {
2410 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2411 });
2412
2413 project
2414 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2415 .await
2416 .unwrap();
2417
2418 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2419 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2420}
2421
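// Saving an untitled buffer to a path should assign it a file, re-detect its
// language from the new extension, clear its dirty state, and make subsequent
// opens of that path return the same buffer.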
2422#[gpui::test]
2423async fn test_save_as(cx: &mut gpui::TestAppContext) {
2424 init_test(cx);
2425
2426 let fs = FakeFs::new(cx.background());
2427 fs.insert_tree("/dir", json!({})).await;
2428
2429 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2430
2431 let languages = project.read_with(cx, |project, _| project.languages().clone());
2432 languages.register(
2433 "/some/path",
2434 LanguageConfig {
2435 name: "Rust".into(),
2436 path_suffixes: vec!["rs".into()],
2437 ..Default::default()
2438 },
2439 tree_sitter_rust::language(),
2440 vec![],
2441 |_| Default::default(),
2442 );
2443
2444 let buffer = project.update(cx, |project, cx| {
2445 project.create_buffer("", None, cx).unwrap()
2446 });
2447 buffer.update(cx, |buffer, cx| {
2448 buffer.edit([(0..0, "abc")], None, cx);
2449 assert!(buffer.is_dirty());
2450 assert!(!buffer.has_conflict());
2451 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2452 });
2453 project
2454 .update(cx, |project, cx| {
2455 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2456 })
2457 .await
2458 .unwrap();
2459 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2460
2461 cx.foreground().run_until_parked();
2462 buffer.read_with(cx, |buffer, cx| {
2463 assert_eq!(
2464 buffer.file().unwrap().full_path(cx),
2465 Path::new("dir/file1.rs")
2466 );
2467 assert!(!buffer.is_dirty());
2468 assert!(!buffer.has_conflict());
2469 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2470 });
2471
2472 let opened_buffer = project
2473 .update(cx, |project, cx| {
2474 project.open_local_buffer("/dir/file1.rs", cx)
2475 })
2476 .await
2477 .unwrap();
2478 assert_eq!(opened_buffer, buffer);
2479}
2480
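// Renaming and deleting files on disk should preserve entry ids and update
// open buffers' paths, and a remote replica of the worktree should converge
// once it applies the observed updates.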
2481#[gpui::test(retries = 5)]
2482async fn test_rescan_and_remote_updates(
2483 deterministic: Arc<Deterministic>,
2484 cx: &mut gpui::TestAppContext,
2485) {
2486 init_test(cx);
2487 cx.foreground().allow_parking();
2488
2489 let dir = temp_tree(json!({
2490 "a": {
2491 "file1": "",
2492 "file2": "",
2493 "file3": "",
2494 },
2495 "b": {
2496 "c": {
2497 "file4": "",
2498 "file5": "",
2499 }
2500 }
2501 }));
2502
2503 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2504 let rpc = project.read_with(cx, |p, _| p.client.clone());
2505
2506 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2507 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2508 async move { buffer.await.unwrap() }
2509 };
2510 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2511 project.read_with(cx, |project, cx| {
2512 let tree = project.worktrees(cx).next().unwrap();
2513 tree.read(cx)
2514 .entry_for_path(path)
2515 .unwrap_or_else(|| panic!("no entry for path {}", path))
2516 .id
2517 })
2518 };
2519
2520 let buffer2 = buffer_for_path("a/file2", cx).await;
2521 let buffer3 = buffer_for_path("a/file3", cx).await;
2522 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2523 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2524
2525 let file2_id = id_for_path("a/file2", cx);
2526 let file3_id = id_for_path("a/file3", cx);
2527 let file4_id = id_for_path("b/c/file4", cx);
2528
2529 // Create a remote copy of this worktree.
2530 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2531
2532 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2533
2534 let updates = Arc::new(Mutex::new(Vec::new()));
2535 tree.update(cx, |tree, cx| {
2536 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2537 let updates = updates.clone();
2538 move |update| {
2539 updates.lock().push(update);
2540 async { true }
2541 }
2542 });
2543 });
2544
2545 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2546 deterministic.run_until_parked();
2547
2548 cx.read(|cx| {
2549 assert!(!buffer2.read(cx).is_dirty());
2550 assert!(!buffer3.read(cx).is_dirty());
2551 assert!(!buffer4.read(cx).is_dirty());
2552 assert!(!buffer5.read(cx).is_dirty());
2553 });
2554
2555 // Rename and delete files and directories.
2556 tree.flush_fs_events(cx).await;
2557 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2558 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2559 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2560 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2561 tree.flush_fs_events(cx).await;
2562
2563 let expected_paths = vec![
2564 "a",
2565 "a/file1",
2566 "a/file2.new",
2567 "b",
2568 "d",
2569 "d/file3",
2570 "d/file4",
2571 ];
2572
2573 cx.read(|app| {
2574 assert_eq!(
2575 tree.read(app)
2576 .paths()
2577 .map(|p| p.to_str().unwrap())
2578 .collect::<Vec<_>>(),
2579 expected_paths
2580 );
2581
2582 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2583 assert_eq!(id_for_path("d/file3", cx), file3_id);
2584 assert_eq!(id_for_path("d/file4", cx), file4_id);
2585
2586 assert_eq!(
2587 buffer2.read(app).file().unwrap().path().as_ref(),
2588 Path::new("a/file2.new")
2589 );
2590 assert_eq!(
2591 buffer3.read(app).file().unwrap().path().as_ref(),
2592 Path::new("d/file3")
2593 );
2594 assert_eq!(
2595 buffer4.read(app).file().unwrap().path().as_ref(),
2596 Path::new("d/file4")
2597 );
2598 assert_eq!(
2599 buffer5.read(app).file().unwrap().path().as_ref(),
2600 Path::new("b/c/file5")
2601 );
2602
2603 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2604 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2605 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2606 assert!(buffer5.read(app).file().unwrap().is_deleted());
2607 });
2608
2609 // Update the remote worktree. Check that it becomes consistent with the
2610 // local worktree.
2611 deterministic.run_until_parked();
2612 remote.update(cx, |remote, _| {
2613 for update in updates.lock().drain(..) {
2614 remote.as_remote_mut().unwrap().update_from_remote(update);
2615 }
2616 });
2617 deterministic.run_until_parked();
2618 remote.read_with(cx, |remote, _| {
2619 assert_eq!(
2620 remote
2621 .paths()
2622 .map(|p| p.to_str().unwrap())
2623 .collect::<Vec<_>>(),
2624 expected_paths
2625 );
2626 });
2627}
2628
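// Renaming a directory should preserve the entry ids of the directory and its
// files, and buffers open within it should remain clean.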
2629#[gpui::test(iterations = 10)]
2630async fn test_buffer_identity_across_renames(
2631 deterministic: Arc<Deterministic>,
2632 cx: &mut gpui::TestAppContext,
2633) {
2634 init_test(cx);
2635
2636 let fs = FakeFs::new(cx.background());
2637 fs.insert_tree(
2638 "/dir",
2639 json!({
2640 "a": {
2641 "file1": "",
2642 }
2643 }),
2644 )
2645 .await;
2646
2647 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2648 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2649 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2650
2651 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2652 project.read_with(cx, |project, cx| {
2653 let tree = project.worktrees(cx).next().unwrap();
2654 tree.read(cx)
2655 .entry_for_path(path)
2656 .unwrap_or_else(|| panic!("no entry for path {}", path))
2657 .id
2658 })
2659 };
2660
2661 let dir_id = id_for_path("a", cx);
2662 let file_id = id_for_path("a/file1", cx);
2663 let buffer = project
2664 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2665 .await
2666 .unwrap();
2667 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2668
2669 project
2670 .update(cx, |project, cx| {
2671 project.rename_entry(dir_id, Path::new("b"), cx)
2672 })
2673 .unwrap()
2674 .await
2675 .unwrap();
2676 deterministic.run_until_parked();
2677 assert_eq!(id_for_path("b", cx), dir_id);
2678 assert_eq!(id_for_path("b/file1", cx), file_id);
2679 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2680}
2681
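// Opening the same path multiple times, even concurrently or after dropping a
// handle, should yield the same buffer.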
2682#[gpui::test]
2683async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2684 init_test(cx);
2685
2686 let fs = FakeFs::new(cx.background());
2687 fs.insert_tree(
2688 "/dir",
2689 json!({
2690 "a.txt": "a-contents",
2691 "b.txt": "b-contents",
2692 }),
2693 )
2694 .await;
2695
2696 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2697
2698 // Spawn multiple tasks to open paths, repeating some paths.
2699 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2700 (
2701 p.open_local_buffer("/dir/a.txt", cx),
2702 p.open_local_buffer("/dir/b.txt", cx),
2703 p.open_local_buffer("/dir/a.txt", cx),
2704 )
2705 });
2706
2707 let buffer_a_1 = buffer_a_1.await.unwrap();
2708 let buffer_a_2 = buffer_a_2.await.unwrap();
2709 let buffer_b = buffer_b.await.unwrap();
2710 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2711 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2712
2713 // There is only one buffer per path.
2714 let buffer_a_id = buffer_a_1.id();
2715 assert_eq!(buffer_a_2.id(), buffer_a_id);
2716
2717 // Open the same path again while it is still open.
2718 drop(buffer_a_1);
2719 let buffer_a_3 = project
2720 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2721 .await
2722 .unwrap();
2723
2724 // There's still only one buffer per path.
2725 assert_eq!(buffer_a_3.id(), buffer_a_id);
2726}
2727
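// Exercises dirty-state tracking across edits, saves, reverting to the saved
// contents, and deletion of the underlying file.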
2728#[gpui::test]
2729async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2730 init_test(cx);
2731
2732 let fs = FakeFs::new(cx.background());
2733 fs.insert_tree(
2734 "/dir",
2735 json!({
2736 "file1": "abc",
2737 "file2": "def",
2738 "file3": "ghi",
2739 }),
2740 )
2741 .await;
2742
2743 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2744
2745 let buffer1 = project
2746 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2747 .await
2748 .unwrap();
2749 let events = Rc::new(RefCell::new(Vec::new()));
2750
    // Initially, the buffer isn't dirty.
2752 buffer1.update(cx, |buffer, cx| {
2753 cx.subscribe(&buffer1, {
2754 let events = events.clone();
2755 move |_, _, event, _| match event {
2756 BufferEvent::Operation(_) => {}
2757 _ => events.borrow_mut().push(event.clone()),
2758 }
2759 })
2760 .detach();
2761
2762 assert!(!buffer.is_dirty());
2763 assert!(events.borrow().is_empty());
2764
2765 buffer.edit([(1..2, "")], None, cx);
2766 });
2767
    // After the first edit, the buffer is dirty and emits a dirty-changed event.
2769 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2771 assert!(buffer.is_dirty());
2772 assert_eq!(
2773 *events.borrow(),
2774 &[language::Event::Edited, language::Event::DirtyChanged]
2775 );
2776 events.borrow_mut().clear();
2777 buffer.did_save(
2778 buffer.version(),
2779 buffer.as_rope().fingerprint(),
2780 buffer.file().unwrap().mtime(),
2781 cx,
2782 );
2783 });
2784
    // After saving, the buffer is not dirty and emits a saved event.
2786 buffer1.update(cx, |buffer, cx| {
2787 assert!(!buffer.is_dirty());
2788 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2789 events.borrow_mut().clear();
2790
2791 buffer.edit([(1..1, "B")], None, cx);
2792 buffer.edit([(2..2, "D")], None, cx);
2793 });
2794
    // After editing again, the buffer is dirty and emits another dirty-changed event.
2796 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2798 assert!(buffer.is_dirty());
2799 assert_eq!(
2800 *events.borrow(),
2801 &[
2802 language::Event::Edited,
2803 language::Event::DirtyChanged,
2804 language::Event::Edited,
2805 ],
2806 );
2807 events.borrow_mut().clear();
2808
2809 // After restoring the buffer to its previously-saved state,
2810 // the buffer is not considered dirty anymore.
2811 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
2813 assert!(!buffer.is_dirty());
2814 });
2815
2816 assert_eq!(
2817 *events.borrow(),
2818 &[language::Event::Edited, language::Event::DirtyChanged]
2819 );
2820
    // When a buffer's file is deleted, the buffer is considered dirty.
2822 let events = Rc::new(RefCell::new(Vec::new()));
2823 let buffer2 = project
2824 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2825 .await
2826 .unwrap();
2827 buffer2.update(cx, |_, cx| {
2828 cx.subscribe(&buffer2, {
2829 let events = events.clone();
2830 move |_, _, event, _| events.borrow_mut().push(event.clone())
2831 })
2832 .detach();
2833 });
2834
2835 fs.remove_file("/dir/file2".as_ref(), Default::default())
2836 .await
2837 .unwrap();
2838 cx.foreground().run_until_parked();
2839 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2840 assert_eq!(
2841 *events.borrow(),
2842 &[
2843 language::Event::DirtyChanged,
2844 language::Event::FileHandleChanged
2845 ]
2846 );
2847
    // When a buffer is already dirty when its file is deleted, no additional
    // dirty-changed event is emitted.
2849 let events = Rc::new(RefCell::new(Vec::new()));
2850 let buffer3 = project
2851 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2852 .await
2853 .unwrap();
2854 buffer3.update(cx, |_, cx| {
2855 cx.subscribe(&buffer3, {
2856 let events = events.clone();
2857 move |_, _, event, _| events.borrow_mut().push(event.clone())
2858 })
2859 .detach();
2860 });
2861
2862 buffer3.update(cx, |buffer, cx| {
2863 buffer.edit([(0..0, "x")], None, cx);
2864 });
2865 events.borrow_mut().clear();
2866 fs.remove_file("/dir/file3".as_ref(), Default::default())
2867 .await
2868 .unwrap();
2869 cx.foreground().run_until_parked();
2870 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2871 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2872}
2873
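// An unmodified buffer reloads when its file changes on disk; a modified
// buffer keeps its edits and is marked as conflicted instead.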
2874#[gpui::test]
2875async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2876 init_test(cx);
2877
2878 let initial_contents = "aaa\nbbbbb\nc\n";
2879 let fs = FakeFs::new(cx.background());
2880 fs.insert_tree(
2881 "/dir",
2882 json!({
2883 "the-file": initial_contents,
2884 }),
2885 )
2886 .await;
2887 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2888 let buffer = project
2889 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2890 .await
2891 .unwrap();
2892
2893 let anchors = (0..3)
2894 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2895 .collect::<Vec<_>>();
2896
2897 // Change the file on disk, adding two new lines of text, and removing
2898 // one line.
2899 buffer.read_with(cx, |buffer, _| {
2900 assert!(!buffer.is_dirty());
2901 assert!(!buffer.has_conflict());
2902 });
2903 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2904 fs.save(
2905 "/dir/the-file".as_ref(),
2906 &new_contents.into(),
2907 LineEnding::Unix,
2908 )
2909 .await
2910 .unwrap();
2911
2912 // Because the buffer was not modified, it is reloaded from disk. Its
2913 // contents are edited according to the diff between the old and new
2914 // file contents.
2915 cx.foreground().run_until_parked();
2916 buffer.update(cx, |buffer, _| {
2917 assert_eq!(buffer.text(), new_contents);
2918 assert!(!buffer.is_dirty());
2919 assert!(!buffer.has_conflict());
2920
2921 let anchor_positions = anchors
2922 .iter()
2923 .map(|anchor| anchor.to_point(&*buffer))
2924 .collect::<Vec<_>>();
2925 assert_eq!(
2926 anchor_positions,
2927 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2928 );
2929 });
2930
    // Modify the buffer, making it dirty.
2932 buffer.update(cx, |buffer, cx| {
2933 buffer.edit([(0..0, " ")], None, cx);
2934 assert!(buffer.is_dirty());
2935 assert!(!buffer.has_conflict());
2936 });
2937
2938 // Change the file on disk again, adding blank lines to the beginning.
2939 fs.save(
2940 "/dir/the-file".as_ref(),
2941 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2942 LineEnding::Unix,
2943 )
2944 .await
2945 .unwrap();
2946
2947 // Because the buffer is modified, it doesn't reload from disk, but is
2948 // marked as having a conflict.
2949 cx.foreground().run_until_parked();
2950 buffer.read_with(cx, |buffer, _| {
2951 assert!(buffer.has_conflict());
2952 });
2953}
2954
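// Line endings are detected on load, normalized to '\n' in memory, preserved
// when saving, and re-detected when the file changes on disk.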
2955#[gpui::test]
2956async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2957 init_test(cx);
2958
2959 let fs = FakeFs::new(cx.background());
2960 fs.insert_tree(
2961 "/dir",
2962 json!({
2963 "file1": "a\nb\nc\n",
2964 "file2": "one\r\ntwo\r\nthree\r\n",
2965 }),
2966 )
2967 .await;
2968
2969 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2970 let buffer1 = project
2971 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2972 .await
2973 .unwrap();
2974 let buffer2 = project
2975 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2976 .await
2977 .unwrap();
2978
2979 buffer1.read_with(cx, |buffer, _| {
2980 assert_eq!(buffer.text(), "a\nb\nc\n");
2981 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2982 });
2983 buffer2.read_with(cx, |buffer, _| {
2984 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2985 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2986 });
2987
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2990 fs.save(
2991 "/dir/file1".as_ref(),
2992 &"aaa\nb\nc\n".into(),
2993 LineEnding::Windows,
2994 )
2995 .await
2996 .unwrap();
2997 cx.foreground().run_until_parked();
2998 buffer1.read_with(cx, |buffer, _| {
2999 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3000 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3001 });
3002
    // Save a file with Windows line endings. The file is written correctly.
3004 buffer2.update(cx, |buffer, cx| {
3005 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3006 });
3007 project
3008 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3009 .await
3010 .unwrap();
3011 assert_eq!(
3012 fs.load("/dir/file2".as_ref()).await.unwrap(),
3013 "one\r\ntwo\r\nthree\r\nfour\r\n",
3014 );
3015}
3016
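// Diagnostics whose related information refers to one another should be
// grouped, with each group containing one primary entry plus its hints.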
3017#[gpui::test]
3018async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3019 init_test(cx);
3020
3021 let fs = FakeFs::new(cx.background());
3022 fs.insert_tree(
3023 "/the-dir",
3024 json!({
3025 "a.rs": "
3026 fn foo(mut v: Vec<usize>) {
3027 for x in &v {
3028 v.push(1);
3029 }
3030 }
3031 "
3032 .unindent(),
3033 }),
3034 )
3035 .await;
3036
3037 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3038 let buffer = project
3039 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3040 .await
3041 .unwrap();
3042
3043 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3044 let message = lsp::PublishDiagnosticsParams {
3045 uri: buffer_uri.clone(),
3046 diagnostics: vec![
3047 lsp::Diagnostic {
3048 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3049 severity: Some(DiagnosticSeverity::WARNING),
3050 message: "error 1".to_string(),
3051 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3052 location: lsp::Location {
3053 uri: buffer_uri.clone(),
3054 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3055 },
3056 message: "error 1 hint 1".to_string(),
3057 }]),
3058 ..Default::default()
3059 },
3060 lsp::Diagnostic {
3061 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3062 severity: Some(DiagnosticSeverity::HINT),
3063 message: "error 1 hint 1".to_string(),
3064 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3065 location: lsp::Location {
3066 uri: buffer_uri.clone(),
3067 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3068 },
3069 message: "original diagnostic".to_string(),
3070 }]),
3071 ..Default::default()
3072 },
3073 lsp::Diagnostic {
3074 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3075 severity: Some(DiagnosticSeverity::ERROR),
3076 message: "error 2".to_string(),
3077 related_information: Some(vec![
3078 lsp::DiagnosticRelatedInformation {
3079 location: lsp::Location {
3080 uri: buffer_uri.clone(),
3081 range: lsp::Range::new(
3082 lsp::Position::new(1, 13),
3083 lsp::Position::new(1, 15),
3084 ),
3085 },
3086 message: "error 2 hint 1".to_string(),
3087 },
3088 lsp::DiagnosticRelatedInformation {
3089 location: lsp::Location {
3090 uri: buffer_uri.clone(),
3091 range: lsp::Range::new(
3092 lsp::Position::new(1, 13),
3093 lsp::Position::new(1, 15),
3094 ),
3095 },
3096 message: "error 2 hint 2".to_string(),
3097 },
3098 ]),
3099 ..Default::default()
3100 },
3101 lsp::Diagnostic {
3102 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3103 severity: Some(DiagnosticSeverity::HINT),
3104 message: "error 2 hint 1".to_string(),
3105 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3106 location: lsp::Location {
3107 uri: buffer_uri.clone(),
3108 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3109 },
3110 message: "original diagnostic".to_string(),
3111 }]),
3112 ..Default::default()
3113 },
3114 lsp::Diagnostic {
3115 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3116 severity: Some(DiagnosticSeverity::HINT),
3117 message: "error 2 hint 2".to_string(),
3118 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3119 location: lsp::Location {
3120 uri: buffer_uri,
3121 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3122 },
3123 message: "original diagnostic".to_string(),
3124 }]),
3125 ..Default::default()
3126 },
3127 ],
3128 version: None,
3129 };
3130
3131 project
3132 .update(cx, |p, cx| {
3133 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3134 })
3135 .unwrap();
3136 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3137
3138 assert_eq!(
3139 buffer
3140 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3141 .collect::<Vec<_>>(),
3142 &[
3143 DiagnosticEntry {
3144 range: Point::new(1, 8)..Point::new(1, 9),
3145 diagnostic: Diagnostic {
3146 severity: DiagnosticSeverity::WARNING,
3147 message: "error 1".to_string(),
3148 group_id: 1,
3149 is_primary: true,
3150 ..Default::default()
3151 }
3152 },
3153 DiagnosticEntry {
3154 range: Point::new(1, 8)..Point::new(1, 9),
3155 diagnostic: Diagnostic {
3156 severity: DiagnosticSeverity::HINT,
3157 message: "error 1 hint 1".to_string(),
3158 group_id: 1,
3159 is_primary: false,
3160 ..Default::default()
3161 }
3162 },
3163 DiagnosticEntry {
3164 range: Point::new(1, 13)..Point::new(1, 15),
3165 diagnostic: Diagnostic {
3166 severity: DiagnosticSeverity::HINT,
3167 message: "error 2 hint 1".to_string(),
3168 group_id: 0,
3169 is_primary: false,
3170 ..Default::default()
3171 }
3172 },
3173 DiagnosticEntry {
3174 range: Point::new(1, 13)..Point::new(1, 15),
3175 diagnostic: Diagnostic {
3176 severity: DiagnosticSeverity::HINT,
3177 message: "error 2 hint 2".to_string(),
3178 group_id: 0,
3179 is_primary: false,
3180 ..Default::default()
3181 }
3182 },
3183 DiagnosticEntry {
3184 range: Point::new(2, 8)..Point::new(2, 17),
3185 diagnostic: Diagnostic {
3186 severity: DiagnosticSeverity::ERROR,
3187 message: "error 2".to_string(),
3188 group_id: 0,
3189 is_primary: true,
3190 ..Default::default()
3191 }
3192 }
3193 ]
3194 );
3195
3196 assert_eq!(
3197 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3198 &[
3199 DiagnosticEntry {
3200 range: Point::new(1, 13)..Point::new(1, 15),
3201 diagnostic: Diagnostic {
3202 severity: DiagnosticSeverity::HINT,
3203 message: "error 2 hint 1".to_string(),
3204 group_id: 0,
3205 is_primary: false,
3206 ..Default::default()
3207 }
3208 },
3209 DiagnosticEntry {
3210 range: Point::new(1, 13)..Point::new(1, 15),
3211 diagnostic: Diagnostic {
3212 severity: DiagnosticSeverity::HINT,
3213 message: "error 2 hint 2".to_string(),
3214 group_id: 0,
3215 is_primary: false,
3216 ..Default::default()
3217 }
3218 },
3219 DiagnosticEntry {
3220 range: Point::new(2, 8)..Point::new(2, 17),
3221 diagnostic: Diagnostic {
3222 severity: DiagnosticSeverity::ERROR,
3223 message: "error 2".to_string(),
3224 group_id: 0,
3225 is_primary: true,
3226 ..Default::default()
3227 }
3228 }
3229 ]
3230 );
3231
3232 assert_eq!(
3233 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3234 &[
3235 DiagnosticEntry {
3236 range: Point::new(1, 8)..Point::new(1, 9),
3237 diagnostic: Diagnostic {
3238 severity: DiagnosticSeverity::WARNING,
3239 message: "error 1".to_string(),
3240 group_id: 1,
3241 is_primary: true,
3242 ..Default::default()
3243 }
3244 },
3245 DiagnosticEntry {
3246 range: Point::new(1, 8)..Point::new(1, 9),
3247 diagnostic: Diagnostic {
3248 severity: DiagnosticSeverity::HINT,
3249 message: "error 1 hint 1".to_string(),
3250 group_id: 1,
3251 is_primary: false,
3252 ..Default::default()
3253 }
3254 },
3255 ]
3256 );
3257}
3258
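// Preparing a rename should resolve the symbol's range, and performing it
// should apply the server's workspace edit across every affected buffer.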
3259#[gpui::test]
3260async fn test_rename(cx: &mut gpui::TestAppContext) {
3261 init_test(cx);
3262
3263 let mut language = Language::new(
3264 LanguageConfig {
3265 name: "Rust".into(),
3266 path_suffixes: vec!["rs".to_string()],
3267 ..Default::default()
3268 },
3269 Some(tree_sitter_rust::language()),
3270 );
3271 let mut fake_servers = language
3272 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3273 capabilities: lsp::ServerCapabilities {
3274 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3275 prepare_provider: Some(true),
3276 work_done_progress_options: Default::default(),
3277 })),
3278 ..Default::default()
3279 },
3280 ..Default::default()
3281 }))
3282 .await;
3283
3284 let fs = FakeFs::new(cx.background());
3285 fs.insert_tree(
3286 "/dir",
3287 json!({
3288 "one.rs": "const ONE: usize = 1;",
3289 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3290 }),
3291 )
3292 .await;
3293
3294 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3295 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3296 let buffer = project
3297 .update(cx, |project, cx| {
3298 project.open_local_buffer("/dir/one.rs", cx)
3299 })
3300 .await
3301 .unwrap();
3302
3303 let fake_server = fake_servers.next().await.unwrap();
3304
3305 let response = project.update(cx, |project, cx| {
3306 project.prepare_rename(buffer.clone(), 7, cx)
3307 });
3308 fake_server
3309 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3310 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3311 assert_eq!(params.position, lsp::Position::new(0, 7));
3312 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3313 lsp::Position::new(0, 6),
3314 lsp::Position::new(0, 9),
3315 ))))
3316 })
3317 .next()
3318 .await
3319 .unwrap();
3320 let range = response.await.unwrap().unwrap();
3321 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3322 assert_eq!(range, 6..9);
3323
3324 let response = project.update(cx, |project, cx| {
3325 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3326 });
3327 fake_server
3328 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3329 assert_eq!(
3330 params.text_document_position.text_document.uri.as_str(),
3331 "file:///dir/one.rs"
3332 );
3333 assert_eq!(
3334 params.text_document_position.position,
3335 lsp::Position::new(0, 7)
3336 );
3337 assert_eq!(params.new_name, "THREE");
3338 Ok(Some(lsp::WorkspaceEdit {
3339 changes: Some(
3340 [
3341 (
3342 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3343 vec![lsp::TextEdit::new(
3344 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3345 "THREE".to_string(),
3346 )],
3347 ),
3348 (
3349 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3350 vec![
3351 lsp::TextEdit::new(
3352 lsp::Range::new(
3353 lsp::Position::new(0, 24),
3354 lsp::Position::new(0, 27),
3355 ),
3356 "THREE".to_string(),
3357 ),
3358 lsp::TextEdit::new(
3359 lsp::Range::new(
3360 lsp::Position::new(0, 35),
3361 lsp::Position::new(0, 38),
3362 ),
3363 "THREE".to_string(),
3364 ),
3365 ],
3366 ),
3367 ]
3368 .into_iter()
3369 .collect(),
3370 ),
3371 ..Default::default()
3372 }))
3373 })
3374 .next()
3375 .await
3376 .unwrap();
3377 let mut transaction = response.await.unwrap().0;
3378 assert_eq!(transaction.len(), 2);
3379 assert_eq!(
3380 transaction
3381 .remove_entry(&buffer)
3382 .unwrap()
3383 .0
3384 .read_with(cx, |buffer, _| buffer.text()),
3385 "const THREE: usize = 1;"
3386 );
3387 assert_eq!(
3388 transaction
3389 .into_keys()
3390 .next()
3391 .unwrap()
3392 .read_with(cx, |buffer, _| buffer.text()),
3393 "const TWO: usize = one::THREE + one::THREE;"
3394 );
3395}
3396
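// Project search should report matches both from files on disk and from open
// buffers with unsaved edits.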
3397#[gpui::test]
3398async fn test_search(cx: &mut gpui::TestAppContext) {
3399 init_test(cx);
3400
3401 let fs = FakeFs::new(cx.background());
3402 fs.insert_tree(
3403 "/dir",
3404 json!({
3405 "one.rs": "const ONE: usize = 1;",
3406 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3407 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3408 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3409 }),
3410 )
3411 .await;
3412 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3413 assert_eq!(
3414 search(
3415 &project,
3416 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3417 cx
3418 )
3419 .await
3420 .unwrap(),
3421 HashMap::from_iter([
3422 ("two.rs".to_string(), vec![6..9]),
3423 ("three.rs".to_string(), vec![37..40])
3424 ])
3425 );
3426
3427 let buffer_4 = project
3428 .update(cx, |project, cx| {
3429 project.open_local_buffer("/dir/four.rs", cx)
3430 })
3431 .await
3432 .unwrap();
3433 buffer_4.update(cx, |buffer, cx| {
3434 let text = "two::TWO";
3435 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3436 });
3437
3438 assert_eq!(
3439 search(
3440 &project,
3441 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3442 cx
3443 )
3444 .await
3445 .unwrap(),
3446 HashMap::from_iter([
3447 ("two.rs".to_string(), vec![6..9]),
3448 ("three.rs".to_string(), vec![37..40]),
3449 ("four.rs".to_string(), vec![25..28, 36..39])
3450 ])
3451 );
3452}
3453
3454#[gpui::test]
3455async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3456 init_test(cx);
3457
3458 let search_query = "file";
3459
3460 let fs = FakeFs::new(cx.background());
3461 fs.insert_tree(
3462 "/dir",
3463 json!({
3464 "one.rs": r#"// Rust file one"#,
3465 "one.ts": r#"// TypeScript file one"#,
3466 "two.rs": r#"// Rust file two"#,
3467 "two.ts": r#"// TypeScript file two"#,
3468 }),
3469 )
3470 .await;
3471 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3472
3473 assert!(
3474 search(
3475 &project,
3476 SearchQuery::text(
3477 search_query,
3478 false,
3479 true,
3480 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3481 Vec::new()
3482 ),
3483 cx
3484 )
3485 .await
3486 .unwrap()
3487 .is_empty(),
3488 "If no inclusions match, no files should be returned"
3489 );
3490
3491 assert_eq!(
3492 search(
3493 &project,
3494 SearchQuery::text(
3495 search_query,
3496 false,
3497 true,
3498 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3499 Vec::new()
3500 ),
3501 cx
3502 )
3503 .await
3504 .unwrap(),
3505 HashMap::from_iter([
3506 ("one.rs".to_string(), vec![8..12]),
3507 ("two.rs".to_string(), vec![8..12]),
3508 ]),
        "Rust-only search should give only Rust files"
3510 );
3511
3512 assert_eq!(
3513 search(
3514 &project,
3515 SearchQuery::text(
3516 search_query,
3517 false,
3518 true,
3519 vec![
3520 Glob::new("*.ts").unwrap().compile_matcher(),
3521 Glob::new("*.odd").unwrap().compile_matcher(),
3522 ],
3523 Vec::new()
3524 ),
3525 cx
3526 )
3527 .await
3528 .unwrap(),
3529 HashMap::from_iter([
3530 ("one.ts".to_string(), vec![14..18]),
3531 ("two.ts".to_string(), vec![14..18]),
3532 ]),
        "TypeScript-only search should give only TypeScript files, even if other inclusions don't match anything"
3534 );
3535
3536 assert_eq!(
3537 search(
3538 &project,
3539 SearchQuery::text(
3540 search_query,
3541 false,
3542 true,
3543 vec![
3544 Glob::new("*.rs").unwrap().compile_matcher(),
3545 Glob::new("*.ts").unwrap().compile_matcher(),
3546 Glob::new("*.odd").unwrap().compile_matcher(),
3547 ],
3548 Vec::new()
3549 ),
3550 cx
3551 )
3552 .await
3553 .unwrap(),
3554 HashMap::from_iter([
3555 ("one.rs".to_string(), vec![8..12]),
3556 ("one.ts".to_string(), vec![14..18]),
3557 ("two.rs".to_string(), vec![8..12]),
3558 ("two.ts".to_string(), vec![14..18]),
3559 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3561 );
3562}
3563
3564#[gpui::test]
3565async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3566 init_test(cx);
3567
3568 let search_query = "file";
3569
3570 let fs = FakeFs::new(cx.background());
3571 fs.insert_tree(
3572 "/dir",
3573 json!({
3574 "one.rs": r#"// Rust file one"#,
3575 "one.ts": r#"// TypeScript file one"#,
3576 "two.rs": r#"// Rust file two"#,
3577 "two.ts": r#"// TypeScript file two"#,
3578 }),
3579 )
3580 .await;
3581 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3582
3583 assert_eq!(
3584 search(
3585 &project,
3586 SearchQuery::text(
3587 search_query,
3588 false,
3589 true,
3590 Vec::new(),
3591 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3592 ),
3593 cx
3594 )
3595 .await
3596 .unwrap(),
3597 HashMap::from_iter([
3598 ("one.rs".to_string(), vec![8..12]),
3599 ("one.ts".to_string(), vec![14..18]),
3600 ("two.rs".to_string(), vec![8..12]),
3601 ("two.ts".to_string(), vec![14..18]),
3602 ]),
3603 "If no exclusions match, all files should be returned"
3604 );
3605
3606 assert_eq!(
3607 search(
3608 &project,
3609 SearchQuery::text(
3610 search_query,
3611 false,
3612 true,
3613 Vec::new(),
3614 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3615 ),
3616 cx
3617 )
3618 .await
3619 .unwrap(),
3620 HashMap::from_iter([
3621 ("one.ts".to_string(), vec![14..18]),
3622 ("two.ts".to_string(), vec![14..18]),
3623 ]),
3624 "Rust exclusion search should give only TypeScript files"
3625 );
3626
3627 assert_eq!(
3628 search(
3629 &project,
3630 SearchQuery::text(
3631 search_query,
3632 false,
3633 true,
3634 Vec::new(),
3635 vec![
3636 Glob::new("*.ts").unwrap().compile_matcher(),
3637 Glob::new("*.odd").unwrap().compile_matcher(),
3638 ],
3639 ),
3640 cx
3641 )
3642 .await
3643 .unwrap(),
3644 HashMap::from_iter([
3645 ("one.rs".to_string(), vec![8..12]),
3646 ("two.rs".to_string(), vec![8..12]),
3647 ]),
3648 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3649 );
3650
3651 assert!(
3652 search(
3653 &project,
3654 SearchQuery::text(
3655 search_query,
3656 false,
3657 true,
3658 Vec::new(),
3659 vec![
3660 Glob::new("*.rs").unwrap().compile_matcher(),
3661 Glob::new("*.ts").unwrap().compile_matcher(),
3662 Glob::new("*.odd").unwrap().compile_matcher(),
3663 ],
3664 ),
3665 cx
3666 )
3667 .await
3668 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3670 );
3671}
3672
3673#[gpui::test]
3674async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3675 init_test(cx);
3676
3677 let search_query = "file";
3678
3679 let fs = FakeFs::new(cx.background());
3680 fs.insert_tree(
3681 "/dir",
3682 json!({
3683 "one.rs": r#"// Rust file one"#,
3684 "one.ts": r#"// TypeScript file one"#,
3685 "two.rs": r#"// Rust file two"#,
3686 "two.ts": r#"// TypeScript file two"#,
3687 }),
3688 )
3689 .await;
3690 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3691
3692 assert!(
3693 search(
3694 &project,
3695 SearchQuery::text(
3696 search_query,
3697 false,
3698 true,
3699 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3700 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3701 ),
3702 cx
3703 )
3704 .await
3705 .unwrap()
3706 .is_empty(),
        "If the inclusion and exclusion globs match no files, no files should be returned"
3708 );
3709
3710 assert!(
3711 search(
3712 &project,
3713 SearchQuery::text(
3714 search_query,
3715 false,
3716 true,
3717 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3718 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3719 ),
3720 cx
3721 )
3722 .await
3723 .unwrap()
3724 .is_empty(),
        "If the same TypeScript glob appears in both inclusions and exclusions, exclusions should win and no files should be returned."
3726 );
3727
3728 assert!(
3729 search(
3730 &project,
3731 SearchQuery::text(
3732 search_query,
3733 false,
3734 true,
3735 vec![
3736 Glob::new("*.ts").unwrap().compile_matcher(),
3737 Glob::new("*.odd").unwrap().compile_matcher()
3738 ],
3739 vec![
3740 Glob::new("*.ts").unwrap().compile_matcher(),
3741 Glob::new("*.odd").unwrap().compile_matcher()
3742 ],
3743 ),
3744 cx
3745 )
3746 .await
3747 .unwrap()
3748 .is_empty(),
        "Adding non-matching globs to both lists should not change that."
3750 );
3751
3752 assert_eq!(
3753 search(
3754 &project,
3755 SearchQuery::text(
3756 search_query,
3757 false,
3758 true,
3759 vec![
3760 Glob::new("*.ts").unwrap().compile_matcher(),
3761 Glob::new("*.odd").unwrap().compile_matcher()
3762 ],
3763 vec![
3764 Glob::new("*.rs").unwrap().compile_matcher(),
3765 Glob::new("*.odd").unwrap().compile_matcher()
3766 ],
3767 ),
3768 cx
3769 )
3770 .await
3771 .unwrap(),
3772 HashMap::from_iter([
3773 ("one.ts".to_string(), vec![14..18]),
3774 ("two.ts".to_string(), vec![14..18]),
3775 ]),
3776 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3777 );
3778}
3779
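// Runs a project-wide search and returns the matching offset ranges keyed by
// file path.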
3780async fn search(
3781 project: &ModelHandle<Project>,
3782 query: SearchQuery,
3783 cx: &mut gpui::TestAppContext,
3784) -> Result<HashMap<String, Vec<Range<usize>>>> {
3785 let results = project
3786 .update(cx, |project, cx| project.search(query, cx))
3787 .await?;
3788
3789 Ok(results
3790 .into_iter()
3791 .map(|(buffer, ranges)| {
3792 buffer.read_with(cx, |buffer, _| {
3793 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3794 let ranges = ranges
3795 .into_iter()
3796 .map(|range| range.to_offset(buffer))
3797 .collect::<Vec<_>>();
3798 (path, ranges)
3799 })
3800 })
3801 .collect())
3802}
3803
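// Shared test setup: forbid parking on the foreground executor and install a
// test settings store along with language and project settings.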
3804fn init_test(cx: &mut gpui::TestAppContext) {
3805 cx.foreground().forbid_parking();
3806
3807 cx.update(|cx| {
3808 cx.set_global(SettingsStore::test(cx));
3809 language::init(cx);
3810 Project::init_settings(cx);
3811 });
3812}