1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use globset::Glob;
5use gpui::{executor::Deterministic, test::subscribe, AppContext};
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 init_test(cx);
30 cx.foreground().allow_parking();
31
32 let dir = temp_tree(json!({
33 "root": {
34 "apple": "",
35 "banana": {
36 "carrot": {
37 "date": "",
38 "endive": "",
39 }
40 },
41 "fennel": {
42 "grape": "",
43 }
44 }
45 }));
46
47 let root_link_path = dir.path().join("root_link");
48 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
49 unix::fs::symlink(
50 &dir.path().join("root/fennel"),
51 &dir.path().join("root/finnochio"),
52 )
53 .unwrap();
54
55 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
56 project.read_with(cx, |project, cx| {
57 let tree = project.worktrees(cx).next().unwrap().read(cx);
58 assert_eq!(tree.file_count(), 5);
59 assert_eq!(
60 tree.inode_for_path("fennel/grape"),
61 tree.inode_for_path("finnochio/grape")
62 );
63 });
64}
65
66#[gpui::test]
67async fn test_managing_language_servers(
68 deterministic: Arc<Deterministic>,
69 cx: &mut gpui::TestAppContext,
70) {
71 init_test(cx);
72
73 let mut rust_language = Language::new(
74 LanguageConfig {
75 name: "Rust".into(),
76 path_suffixes: vec!["rs".to_string()],
77 ..Default::default()
78 },
79 Some(tree_sitter_rust::language()),
80 );
81 let mut json_language = Language::new(
82 LanguageConfig {
83 name: "JSON".into(),
84 path_suffixes: vec!["json".to_string()],
85 ..Default::default()
86 },
87 None,
88 );
89 let mut fake_rust_servers = rust_language
90 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
91 name: "the-rust-language-server",
92 capabilities: lsp::ServerCapabilities {
93 completion_provider: Some(lsp::CompletionOptions {
94 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
95 ..Default::default()
96 }),
97 ..Default::default()
98 },
99 ..Default::default()
100 }))
101 .await;
102 let mut fake_json_servers = json_language
103 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
104 name: "the-json-language-server",
105 capabilities: lsp::ServerCapabilities {
106 completion_provider: Some(lsp::CompletionOptions {
107 trigger_characters: Some(vec![":".to_string()]),
108 ..Default::default()
109 }),
110 ..Default::default()
111 },
112 ..Default::default()
113 }))
114 .await;
115
116 let fs = FakeFs::new(cx.background());
117 fs.insert_tree(
118 "/the-root",
119 json!({
120 "test.rs": "const A: i32 = 1;",
121 "test2.rs": "",
122 "Cargo.toml": "a = 1",
123 "package.json": "{\"a\": 1}",
124 }),
125 )
126 .await;
127
128 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
129
130 // Open a buffer without an associated language server.
131 let toml_buffer = project
132 .update(cx, |project, cx| {
133 project.open_local_buffer("/the-root/Cargo.toml", cx)
134 })
135 .await
136 .unwrap();
137
138 // Open a buffer with an associated language server before the language for it has been loaded.
139 let rust_buffer = project
140 .update(cx, |project, cx| {
141 project.open_local_buffer("/the-root/test.rs", cx)
142 })
143 .await
144 .unwrap();
145 rust_buffer.read_with(cx, |buffer, _| {
146 assert_eq!(buffer.language().map(|l| l.name()), None);
147 });
148
149 // Now we add the languages to the project, and ensure they get assigned to all
150 // the relevant open buffers.
151 project.update(cx, |project, _| {
152 project.languages.add(Arc::new(json_language));
153 project.languages.add(Arc::new(rust_language));
154 });
155 deterministic.run_until_parked();
156 rust_buffer.read_with(cx, |buffer, _| {
157 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
158 });
159
160 // A server is started up, and it is notified about Rust files.
161 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
162 assert_eq!(
163 fake_rust_server
164 .receive_notification::<lsp::notification::DidOpenTextDocument>()
165 .await
166 .text_document,
167 lsp::TextDocumentItem {
168 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
169 version: 0,
170 text: "const A: i32 = 1;".to_string(),
171 language_id: Default::default()
172 }
173 );
174
175 // The buffer is configured based on the language server's capabilities.
176 rust_buffer.read_with(cx, |buffer, _| {
177 assert_eq!(
178 buffer.completion_triggers(),
179 &[".".to_string(), "::".to_string()]
180 );
181 });
182 toml_buffer.read_with(cx, |buffer, _| {
183 assert!(buffer.completion_triggers().is_empty());
184 });
185
186 // Edit a buffer. The changes are reported to the language server.
187 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
188 assert_eq!(
189 fake_rust_server
190 .receive_notification::<lsp::notification::DidChangeTextDocument>()
191 .await
192 .text_document,
193 lsp::VersionedTextDocumentIdentifier::new(
194 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
195 1
196 )
197 );
198
199 // Open a third buffer with a different associated language server.
200 let json_buffer = project
201 .update(cx, |project, cx| {
202 project.open_local_buffer("/the-root/package.json", cx)
203 })
204 .await
205 .unwrap();
206
207 // A json language server is started up and is only notified about the json buffer.
208 let mut fake_json_server = fake_json_servers.next().await.unwrap();
209 assert_eq!(
210 fake_json_server
211 .receive_notification::<lsp::notification::DidOpenTextDocument>()
212 .await
213 .text_document,
214 lsp::TextDocumentItem {
215 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
216 version: 0,
217 text: "{\"a\": 1}".to_string(),
218 language_id: Default::default()
219 }
220 );
221
222 // This buffer is configured based on the second language server's
223 // capabilities.
224 json_buffer.read_with(cx, |buffer, _| {
225 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
226 });
227
228 // When opening another buffer whose language server is already running,
229 // it is also configured based on the existing language server's capabilities.
230 let rust_buffer2 = project
231 .update(cx, |project, cx| {
232 project.open_local_buffer("/the-root/test2.rs", cx)
233 })
234 .await
235 .unwrap();
236 rust_buffer2.read_with(cx, |buffer, _| {
237 assert_eq!(
238 buffer.completion_triggers(),
239 &[".".to_string(), "::".to_string()]
240 );
241 });
242
243 // Changes are reported only to servers matching the buffer's language.
244 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
245 rust_buffer2.update(cx, |buffer, cx| {
246 buffer.edit([(0..0, "let x = 1;")], None, cx)
247 });
248 assert_eq!(
249 fake_rust_server
250 .receive_notification::<lsp::notification::DidChangeTextDocument>()
251 .await
252 .text_document,
253 lsp::VersionedTextDocumentIdentifier::new(
254 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
255 1
256 )
257 );
258
259 // Save notifications are reported to all servers.
260 project
261 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
262 .await
263 .unwrap();
264 assert_eq!(
265 fake_rust_server
266 .receive_notification::<lsp::notification::DidSaveTextDocument>()
267 .await
268 .text_document,
269 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
270 );
271 assert_eq!(
272 fake_json_server
273 .receive_notification::<lsp::notification::DidSaveTextDocument>()
274 .await
275 .text_document,
276 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
277 );
278
279 // Renames are reported only to servers matching the buffer's language.
280 fs.rename(
281 Path::new("/the-root/test2.rs"),
282 Path::new("/the-root/test3.rs"),
283 Default::default(),
284 )
285 .await
286 .unwrap();
287 assert_eq!(
288 fake_rust_server
289 .receive_notification::<lsp::notification::DidCloseTextDocument>()
290 .await
291 .text_document,
292 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
293 );
294 assert_eq!(
295 fake_rust_server
296 .receive_notification::<lsp::notification::DidOpenTextDocument>()
297 .await
298 .text_document,
299 lsp::TextDocumentItem {
300 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
301 version: 0,
302 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
303 language_id: Default::default()
304 },
305 );
306
307 rust_buffer2.update(cx, |buffer, cx| {
308 buffer.update_diagnostics(
309 LanguageServerId(0),
310 DiagnosticSet::from_sorted_entries(
311 vec![DiagnosticEntry {
312 diagnostic: Default::default(),
313 range: Anchor::MIN..Anchor::MAX,
314 }],
315 &buffer.snapshot(),
316 ),
317 cx,
318 );
319 assert_eq!(
320 buffer
321 .snapshot()
322 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
323 .count(),
324 1
325 );
326 });
327
328 // When the rename changes the extension of the file, the buffer gets closed on the old
329 // language server and gets opened on the new one.
330 fs.rename(
331 Path::new("/the-root/test3.rs"),
332 Path::new("/the-root/test3.json"),
333 Default::default(),
334 )
335 .await
336 .unwrap();
337 assert_eq!(
338 fake_rust_server
339 .receive_notification::<lsp::notification::DidCloseTextDocument>()
340 .await
341 .text_document,
342 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
343 );
344 assert_eq!(
345 fake_json_server
346 .receive_notification::<lsp::notification::DidOpenTextDocument>()
347 .await
348 .text_document,
349 lsp::TextDocumentItem {
350 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
351 version: 0,
352 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
353 language_id: Default::default()
354 },
355 );
356
357 // We clear the diagnostics, since the language has changed.
358 rust_buffer2.read_with(cx, |buffer, _| {
359 assert_eq!(
360 buffer
361 .snapshot()
362 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
363 .count(),
364 0
365 );
366 });
367
368 // The renamed file's version resets after changing language server.
369 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
370 assert_eq!(
371 fake_json_server
372 .receive_notification::<lsp::notification::DidChangeTextDocument>()
373 .await
374 .text_document,
375 lsp::VersionedTextDocumentIdentifier::new(
376 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
377 1
378 )
379 );
380
381 // Restart language servers
382 project.update(cx, |project, cx| {
383 project.restart_language_servers_for_buffers(
384 vec![rust_buffer.clone(), json_buffer.clone()],
385 cx,
386 );
387 });
388
389 let mut rust_shutdown_requests = fake_rust_server
390 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
391 let mut json_shutdown_requests = fake_json_server
392 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
393 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
394
395 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
396 let mut fake_json_server = fake_json_servers.next().await.unwrap();
397
398 // Ensure rust document is reopened in new rust language server
399 assert_eq!(
400 fake_rust_server
401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
402 .await
403 .text_document,
404 lsp::TextDocumentItem {
405 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
406 version: 0,
407 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
408 language_id: Default::default()
409 }
410 );
411
412 // Ensure json documents are reopened in new json language server
413 assert_set_eq!(
414 [
415 fake_json_server
416 .receive_notification::<lsp::notification::DidOpenTextDocument>()
417 .await
418 .text_document,
419 fake_json_server
420 .receive_notification::<lsp::notification::DidOpenTextDocument>()
421 .await
422 .text_document,
423 ],
424 [
425 lsp::TextDocumentItem {
426 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
427 version: 0,
428 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
429 language_id: Default::default()
430 },
431 lsp::TextDocumentItem {
432 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
433 version: 0,
434 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
435 language_id: Default::default()
436 }
437 ]
438 );
439
440 // Close notifications are reported only to servers matching the buffer's language.
441 cx.update(|_| drop(json_buffer));
442 let close_message = lsp::DidCloseTextDocumentParams {
443 text_document: lsp::TextDocumentIdentifier::new(
444 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
445 ),
446 };
447 assert_eq!(
448 fake_json_server
449 .receive_notification::<lsp::notification::DidCloseTextDocument>()
450 .await,
451 close_message,
452 );
453}
454
455#[gpui::test]
456async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
457 init_test(cx);
458
459 let mut language = Language::new(
460 LanguageConfig {
461 name: "Rust".into(),
462 path_suffixes: vec!["rs".to_string()],
463 ..Default::default()
464 },
465 Some(tree_sitter_rust::language()),
466 );
467 let mut fake_servers = language
468 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
469 name: "the-language-server",
470 ..Default::default()
471 }))
472 .await;
473
474 let fs = FakeFs::new(cx.background());
475 fs.insert_tree(
476 "/the-root",
477 json!({
478 "a.rs": "",
479 "b.rs": "",
480 }),
481 )
482 .await;
483
484 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
485 project.update(cx, |project, _| {
486 project.languages.add(Arc::new(language));
487 });
488 cx.foreground().run_until_parked();
489
490 // Start the language server by opening a buffer with a compatible file extension.
491 let _buffer = project
492 .update(cx, |project, cx| {
493 project.open_local_buffer("/the-root/a.rs", cx)
494 })
495 .await
496 .unwrap();
497
498 // Keep track of the FS events reported to the language server.
499 let fake_server = fake_servers.next().await.unwrap();
500 let file_changes = Arc::new(Mutex::new(Vec::new()));
501 fake_server
502 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
503 registrations: vec![lsp::Registration {
504 id: Default::default(),
505 method: "workspace/didChangeWatchedFiles".to_string(),
506 register_options: serde_json::to_value(
507 lsp::DidChangeWatchedFilesRegistrationOptions {
508 watchers: vec![lsp::FileSystemWatcher {
509 glob_pattern: "/the-root/*.{rs,c}".to_string(),
510 kind: None,
511 }],
512 },
513 )
514 .ok(),
515 }],
516 })
517 .await
518 .unwrap();
519 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
520 let file_changes = file_changes.clone();
521 move |params, _| {
522 let mut file_changes = file_changes.lock();
523 file_changes.extend(params.changes);
524 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
525 }
526 });
527
528 cx.foreground().run_until_parked();
529 assert_eq!(file_changes.lock().len(), 0);
530
531 // Perform some file system mutations, two of which match the watched patterns,
532 // and one of which does not.
533 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
534 .await
535 .unwrap();
536 fs.create_file("/the-root/d.txt".as_ref(), Default::default())
537 .await
538 .unwrap();
539 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
540 .await
541 .unwrap();
542
543 // The language server receives events for the FS mutations that match its watch patterns.
544 cx.foreground().run_until_parked();
545 assert_eq!(
546 &*file_changes.lock(),
547 &[
548 lsp::FileEvent {
549 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
550 typ: lsp::FileChangeType::DELETED,
551 },
552 lsp::FileEvent {
553 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
554 typ: lsp::FileChangeType::CREATED,
555 },
556 ]
557 );
558}
559
560#[gpui::test]
561async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
562 init_test(cx);
563
564 let fs = FakeFs::new(cx.background());
565 fs.insert_tree(
566 "/dir",
567 json!({
568 "a.rs": "let a = 1;",
569 "b.rs": "let b = 2;"
570 }),
571 )
572 .await;
573
574 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
575
576 let buffer_a = project
577 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
578 .await
579 .unwrap();
580 let buffer_b = project
581 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
582 .await
583 .unwrap();
584
585 project.update(cx, |project, cx| {
586 project
587 .update_diagnostics(
588 LanguageServerId(0),
589 lsp::PublishDiagnosticsParams {
590 uri: Url::from_file_path("/dir/a.rs").unwrap(),
591 version: None,
592 diagnostics: vec![lsp::Diagnostic {
593 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
594 severity: Some(lsp::DiagnosticSeverity::ERROR),
595 message: "error 1".to_string(),
596 ..Default::default()
597 }],
598 },
599 &[],
600 cx,
601 )
602 .unwrap();
603 project
604 .update_diagnostics(
605 LanguageServerId(0),
606 lsp::PublishDiagnosticsParams {
607 uri: Url::from_file_path("/dir/b.rs").unwrap(),
608 version: None,
609 diagnostics: vec![lsp::Diagnostic {
610 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
611 severity: Some(lsp::DiagnosticSeverity::WARNING),
612 message: "error 2".to_string(),
613 ..Default::default()
614 }],
615 },
616 &[],
617 cx,
618 )
619 .unwrap();
620 });
621
622 buffer_a.read_with(cx, |buffer, _| {
623 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
624 assert_eq!(
625 chunks
626 .iter()
627 .map(|(s, d)| (s.as_str(), *d))
628 .collect::<Vec<_>>(),
629 &[
630 ("let ", None),
631 ("a", Some(DiagnosticSeverity::ERROR)),
632 (" = 1;", None),
633 ]
634 );
635 });
636 buffer_b.read_with(cx, |buffer, _| {
637 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
638 assert_eq!(
639 chunks
640 .iter()
641 .map(|(s, d)| (s.as_str(), *d))
642 .collect::<Vec<_>>(),
643 &[
644 ("let ", None),
645 ("b", Some(DiagnosticSeverity::WARNING)),
646 (" = 2;", None),
647 ]
648 );
649 });
650}
651
652#[gpui::test]
653async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
654 init_test(cx);
655
656 let fs = FakeFs::new(cx.background());
657 fs.insert_tree(
658 "/root",
659 json!({
660 "dir": {
661 "a.rs": "let a = 1;",
662 },
663 "other.rs": "let b = c;"
664 }),
665 )
666 .await;
667
668 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
669
670 let (worktree, _) = project
671 .update(cx, |project, cx| {
672 project.find_or_create_local_worktree("/root/other.rs", false, cx)
673 })
674 .await
675 .unwrap();
676 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
677
678 project.update(cx, |project, cx| {
679 project
680 .update_diagnostics(
681 LanguageServerId(0),
682 lsp::PublishDiagnosticsParams {
683 uri: Url::from_file_path("/root/other.rs").unwrap(),
684 version: None,
685 diagnostics: vec![lsp::Diagnostic {
686 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
687 severity: Some(lsp::DiagnosticSeverity::ERROR),
688 message: "unknown variable 'c'".to_string(),
689 ..Default::default()
690 }],
691 },
692 &[],
693 cx,
694 )
695 .unwrap();
696 });
697
698 let buffer = project
699 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
700 .await
701 .unwrap();
702 buffer.read_with(cx, |buffer, _| {
703 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
704 assert_eq!(
705 chunks
706 .iter()
707 .map(|(s, d)| (s.as_str(), *d))
708 .collect::<Vec<_>>(),
709 &[
710 ("let b = ", None),
711 ("c", Some(DiagnosticSeverity::ERROR)),
712 (";", None),
713 ]
714 );
715 });
716
717 project.read_with(cx, |project, cx| {
718 assert_eq!(project.diagnostic_summaries(cx).next(), None);
719 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
720 });
721}
722
723#[gpui::test]
724async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
725 init_test(cx);
726
727 let progress_token = "the-progress-token";
728 let mut language = Language::new(
729 LanguageConfig {
730 name: "Rust".into(),
731 path_suffixes: vec!["rs".to_string()],
732 ..Default::default()
733 },
734 Some(tree_sitter_rust::language()),
735 );
736 let mut fake_servers = language
737 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
738 disk_based_diagnostics_progress_token: Some(progress_token.into()),
739 disk_based_diagnostics_sources: vec!["disk".into()],
740 ..Default::default()
741 }))
742 .await;
743
744 let fs = FakeFs::new(cx.background());
745 fs.insert_tree(
746 "/dir",
747 json!({
748 "a.rs": "fn a() { A }",
749 "b.rs": "const y: i32 = 1",
750 }),
751 )
752 .await;
753
754 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
755 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
756 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
757
758 // Cause worktree to start the fake language server
759 let _buffer = project
760 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
761 .await
762 .unwrap();
763
764 let mut events = subscribe(&project, cx);
765
766 let fake_server = fake_servers.next().await.unwrap();
767 fake_server
768 .start_progress(format!("{}/0", progress_token))
769 .await;
770 assert_eq!(
771 events.next().await.unwrap(),
772 Event::DiskBasedDiagnosticsStarted {
773 language_server_id: LanguageServerId(0),
774 }
775 );
776
777 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
778 uri: Url::from_file_path("/dir/a.rs").unwrap(),
779 version: None,
780 diagnostics: vec![lsp::Diagnostic {
781 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
782 severity: Some(lsp::DiagnosticSeverity::ERROR),
783 message: "undefined variable 'A'".to_string(),
784 ..Default::default()
785 }],
786 });
787 assert_eq!(
788 events.next().await.unwrap(),
789 Event::DiagnosticsUpdated {
790 language_server_id: LanguageServerId(0),
791 path: (worktree_id, Path::new("a.rs")).into()
792 }
793 );
794
795 fake_server.end_progress(format!("{}/0", progress_token));
796 assert_eq!(
797 events.next().await.unwrap(),
798 Event::DiskBasedDiagnosticsFinished {
799 language_server_id: LanguageServerId(0)
800 }
801 );
802
803 let buffer = project
804 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
805 .await
806 .unwrap();
807
808 buffer.read_with(cx, |buffer, _| {
809 let snapshot = buffer.snapshot();
810 let diagnostics = snapshot
811 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
812 .collect::<Vec<_>>();
813 assert_eq!(
814 diagnostics,
815 &[DiagnosticEntry {
816 range: Point::new(0, 9)..Point::new(0, 10),
817 diagnostic: Diagnostic {
818 severity: lsp::DiagnosticSeverity::ERROR,
819 message: "undefined variable 'A'".to_string(),
820 group_id: 0,
821 is_primary: true,
822 ..Default::default()
823 }
824 }]
825 )
826 });
827
828 // Ensure publishing empty diagnostics twice only results in one update event.
829 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
830 uri: Url::from_file_path("/dir/a.rs").unwrap(),
831 version: None,
832 diagnostics: Default::default(),
833 });
834 assert_eq!(
835 events.next().await.unwrap(),
836 Event::DiagnosticsUpdated {
837 language_server_id: LanguageServerId(0),
838 path: (worktree_id, Path::new("a.rs")).into()
839 }
840 );
841
842 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
843 uri: Url::from_file_path("/dir/a.rs").unwrap(),
844 version: None,
845 diagnostics: Default::default(),
846 });
847 cx.foreground().run_until_parked();
848 assert_eq!(futures::poll!(events.next()), Poll::Pending);
849}
850
851#[gpui::test]
852async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
853 init_test(cx);
854
855 let progress_token = "the-progress-token";
856 let mut language = Language::new(
857 LanguageConfig {
858 path_suffixes: vec!["rs".to_string()],
859 ..Default::default()
860 },
861 None,
862 );
863 let mut fake_servers = language
864 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
865 disk_based_diagnostics_sources: vec!["disk".into()],
866 disk_based_diagnostics_progress_token: Some(progress_token.into()),
867 ..Default::default()
868 }))
869 .await;
870
871 let fs = FakeFs::new(cx.background());
872 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
873
874 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
875 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
876
877 let buffer = project
878 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
879 .await
880 .unwrap();
881
882 // Simulate diagnostics starting to update.
883 let fake_server = fake_servers.next().await.unwrap();
884 fake_server.start_progress(progress_token).await;
885
886 // Restart the server before the diagnostics finish updating.
887 project.update(cx, |project, cx| {
888 project.restart_language_servers_for_buffers([buffer], cx);
889 });
890 let mut events = subscribe(&project, cx);
891
892 // Simulate the newly started server sending more diagnostics.
893 let fake_server = fake_servers.next().await.unwrap();
894 fake_server.start_progress(progress_token).await;
895 assert_eq!(
896 events.next().await.unwrap(),
897 Event::DiskBasedDiagnosticsStarted {
898 language_server_id: LanguageServerId(1)
899 }
900 );
901 project.read_with(cx, |project, _| {
902 assert_eq!(
903 project
904 .language_servers_running_disk_based_diagnostics()
905 .collect::<Vec<_>>(),
906 [LanguageServerId(1)]
907 );
908 });
909
910 // All diagnostics are considered done, despite the old server's diagnostic
911 // task never completing.
912 fake_server.end_progress(progress_token);
913 assert_eq!(
914 events.next().await.unwrap(),
915 Event::DiskBasedDiagnosticsFinished {
916 language_server_id: LanguageServerId(1)
917 }
918 );
919 project.read_with(cx, |project, _| {
920 assert_eq!(
921 project
922 .language_servers_running_disk_based_diagnostics()
923 .collect::<Vec<_>>(),
924 [LanguageServerId(0); 0]
925 );
926 });
927}
928
929#[gpui::test]
930async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
931 init_test(cx);
932
933 let mut language = Language::new(
934 LanguageConfig {
935 path_suffixes: vec!["rs".to_string()],
936 ..Default::default()
937 },
938 None,
939 );
940 let mut fake_servers = language
941 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
942 ..Default::default()
943 }))
944 .await;
945
946 let fs = FakeFs::new(cx.background());
947 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
948
949 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
950 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
951
952 let buffer = project
953 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
954 .await
955 .unwrap();
956
957 // Publish diagnostics
958 let fake_server = fake_servers.next().await.unwrap();
959 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
960 uri: Url::from_file_path("/dir/a.rs").unwrap(),
961 version: None,
962 diagnostics: vec![lsp::Diagnostic {
963 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
964 severity: Some(lsp::DiagnosticSeverity::ERROR),
965 message: "the message".to_string(),
966 ..Default::default()
967 }],
968 });
969
970 cx.foreground().run_until_parked();
971 buffer.read_with(cx, |buffer, _| {
972 assert_eq!(
973 buffer
974 .snapshot()
975 .diagnostics_in_range::<_, usize>(0..1, false)
976 .map(|entry| entry.diagnostic.message.clone())
977 .collect::<Vec<_>>(),
978 ["the message".to_string()]
979 );
980 });
981 project.read_with(cx, |project, cx| {
982 assert_eq!(
983 project.diagnostic_summary(cx),
984 DiagnosticSummary {
985 error_count: 1,
986 warning_count: 0,
987 }
988 );
989 });
990
991 project.update(cx, |project, cx| {
992 project.restart_language_servers_for_buffers([buffer.clone()], cx);
993 });
994
995 // The diagnostics are cleared.
996 cx.foreground().run_until_parked();
997 buffer.read_with(cx, |buffer, _| {
998 assert_eq!(
999 buffer
1000 .snapshot()
1001 .diagnostics_in_range::<_, usize>(0..1, false)
1002 .map(|entry| entry.diagnostic.message.clone())
1003 .collect::<Vec<_>>(),
1004 Vec::<String>::new(),
1005 );
1006 });
1007 project.read_with(cx, |project, cx| {
1008 assert_eq!(
1009 project.diagnostic_summary(cx),
1010 DiagnosticSummary {
1011 error_count: 0,
1012 warning_count: 0,
1013 }
1014 );
1015 });
1016}
1017
1018#[gpui::test]
1019async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1020 init_test(cx);
1021
1022 let mut language = Language::new(
1023 LanguageConfig {
1024 path_suffixes: vec!["rs".to_string()],
1025 ..Default::default()
1026 },
1027 None,
1028 );
1029 let mut fake_servers = language
1030 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1031 name: "the-lsp",
1032 ..Default::default()
1033 }))
1034 .await;
1035
1036 let fs = FakeFs::new(cx.background());
1037 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1038
1039 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1040 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1041
1042 let buffer = project
1043 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1044 .await
1045 .unwrap();
1046
1047 // Before restarting the server, report diagnostics with an unknown buffer version.
1048 let fake_server = fake_servers.next().await.unwrap();
1049 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1050 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1051 version: Some(10000),
1052 diagnostics: Vec::new(),
1053 });
1054 cx.foreground().run_until_parked();
1055
1056 project.update(cx, |project, cx| {
1057 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1058 });
1059 let mut fake_server = fake_servers.next().await.unwrap();
1060 let notification = fake_server
1061 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1062 .await
1063 .text_document;
1064 assert_eq!(notification.version, 0);
1065}
1066
1067#[gpui::test]
1068async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1069 init_test(cx);
1070
1071 let mut rust = Language::new(
1072 LanguageConfig {
1073 name: Arc::from("Rust"),
1074 path_suffixes: vec!["rs".to_string()],
1075 ..Default::default()
1076 },
1077 None,
1078 );
1079 let mut fake_rust_servers = rust
1080 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1081 name: "rust-lsp",
1082 ..Default::default()
1083 }))
1084 .await;
1085 let mut js = Language::new(
1086 LanguageConfig {
1087 name: Arc::from("JavaScript"),
1088 path_suffixes: vec!["js".to_string()],
1089 ..Default::default()
1090 },
1091 None,
1092 );
1093 let mut fake_js_servers = js
1094 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1095 name: "js-lsp",
1096 ..Default::default()
1097 }))
1098 .await;
1099
1100 let fs = FakeFs::new(cx.background());
1101 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1102 .await;
1103
1104 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1105 project.update(cx, |project, _| {
1106 project.languages.add(Arc::new(rust));
1107 project.languages.add(Arc::new(js));
1108 });
1109
1110 let _rs_buffer = project
1111 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1112 .await
1113 .unwrap();
1114 let _js_buffer = project
1115 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1116 .await
1117 .unwrap();
1118
1119 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1120 assert_eq!(
1121 fake_rust_server_1
1122 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1123 .await
1124 .text_document
1125 .uri
1126 .as_str(),
1127 "file:///dir/a.rs"
1128 );
1129
1130 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1131 assert_eq!(
1132 fake_js_server
1133 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1134 .await
1135 .text_document
1136 .uri
1137 .as_str(),
1138 "file:///dir/b.js"
1139 );
1140
1141 // Disable Rust language server, ensuring only that server gets stopped.
1142 cx.update(|cx| {
1143 cx.update_global(|settings: &mut SettingsStore, cx| {
1144 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1145 settings.languages.insert(
1146 Arc::from("Rust"),
1147 LanguageSettingsContent {
1148 enable_language_server: Some(false),
1149 ..Default::default()
1150 },
1151 );
1152 });
1153 })
1154 });
1155 fake_rust_server_1
1156 .receive_notification::<lsp::notification::Exit>()
1157 .await;
1158
1159 // Enable Rust and disable JavaScript language servers, ensuring that the
1160 // former gets started again and that the latter stops.
1161 cx.update(|cx| {
1162 cx.update_global(|settings: &mut SettingsStore, cx| {
1163 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1164 settings.languages.insert(
1165 Arc::from("Rust"),
1166 LanguageSettingsContent {
1167 enable_language_server: Some(true),
1168 ..Default::default()
1169 },
1170 );
1171 settings.languages.insert(
1172 Arc::from("JavaScript"),
1173 LanguageSettingsContent {
1174 enable_language_server: Some(false),
1175 ..Default::default()
1176 },
1177 );
1178 });
1179 })
1180 });
1181 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1182 assert_eq!(
1183 fake_rust_server_2
1184 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1185 .await
1186 .text_document
1187 .uri
1188 .as_str(),
1189 "file:///dir/a.rs"
1190 );
1191 fake_js_server
1192 .receive_notification::<lsp::notification::Exit>()
1193 .await;
1194}
1195
1196#[gpui::test]
1197async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1198 init_test(cx);
1199
1200 let mut language = Language::new(
1201 LanguageConfig {
1202 name: "Rust".into(),
1203 path_suffixes: vec!["rs".to_string()],
1204 ..Default::default()
1205 },
1206 Some(tree_sitter_rust::language()),
1207 );
1208 let mut fake_servers = language
1209 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1210 disk_based_diagnostics_sources: vec!["disk".into()],
1211 ..Default::default()
1212 }))
1213 .await;
1214
1215 let text = "
1216 fn a() { A }
1217 fn b() { BB }
1218 fn c() { CCC }
1219 "
1220 .unindent();
1221
1222 let fs = FakeFs::new(cx.background());
1223 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1224
1225 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1226 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1227
1228 let buffer = project
1229 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1230 .await
1231 .unwrap();
1232
1233 let mut fake_server = fake_servers.next().await.unwrap();
1234 let open_notification = fake_server
1235 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1236 .await;
1237
1238 // Edit the buffer, moving the content down
1239 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1240 let change_notification_1 = fake_server
1241 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1242 .await;
1243 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1244
1245 // Report some diagnostics for the initial version of the buffer
1246 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1247 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1248 version: Some(open_notification.text_document.version),
1249 diagnostics: vec![
1250 lsp::Diagnostic {
1251 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1252 severity: Some(DiagnosticSeverity::ERROR),
1253 message: "undefined variable 'A'".to_string(),
1254 source: Some("disk".to_string()),
1255 ..Default::default()
1256 },
1257 lsp::Diagnostic {
1258 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1259 severity: Some(DiagnosticSeverity::ERROR),
1260 message: "undefined variable 'BB'".to_string(),
1261 source: Some("disk".to_string()),
1262 ..Default::default()
1263 },
1264 lsp::Diagnostic {
1265 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1266 severity: Some(DiagnosticSeverity::ERROR),
1267 source: Some("disk".to_string()),
1268 message: "undefined variable 'CCC'".to_string(),
1269 ..Default::default()
1270 },
1271 ],
1272 });
1273
1274 // The diagnostics have moved down since they were created.
1275 buffer.next_notification(cx).await;
1276 buffer.next_notification(cx).await;
1277 buffer.read_with(cx, |buffer, _| {
1278 assert_eq!(
1279 buffer
1280 .snapshot()
1281 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1282 .collect::<Vec<_>>(),
1283 &[
1284 DiagnosticEntry {
1285 range: Point::new(3, 9)..Point::new(3, 11),
1286 diagnostic: Diagnostic {
1287 source: Some("disk".into()),
1288 severity: DiagnosticSeverity::ERROR,
1289 message: "undefined variable 'BB'".to_string(),
1290 is_disk_based: true,
1291 group_id: 1,
1292 is_primary: true,
1293 ..Default::default()
1294 },
1295 },
1296 DiagnosticEntry {
1297 range: Point::new(4, 9)..Point::new(4, 12),
1298 diagnostic: Diagnostic {
1299 source: Some("disk".into()),
1300 severity: DiagnosticSeverity::ERROR,
1301 message: "undefined variable 'CCC'".to_string(),
1302 is_disk_based: true,
1303 group_id: 2,
1304 is_primary: true,
1305 ..Default::default()
1306 }
1307 }
1308 ]
1309 );
1310 assert_eq!(
1311 chunks_with_diagnostics(buffer, 0..buffer.len()),
1312 [
1313 ("\n\nfn a() { ".to_string(), None),
1314 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1315 (" }\nfn b() { ".to_string(), None),
1316 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1317 (" }\nfn c() { ".to_string(), None),
1318 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1319 (" }\n".to_string(), None),
1320 ]
1321 );
1322 assert_eq!(
1323 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1324 [
1325 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1326 (" }\nfn c() { ".to_string(), None),
1327 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1328 ]
1329 );
1330 });
1331
1332 // Ensure overlapping diagnostics are highlighted correctly.
1333 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1334 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1335 version: Some(open_notification.text_document.version),
1336 diagnostics: vec![
1337 lsp::Diagnostic {
1338 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1339 severity: Some(DiagnosticSeverity::ERROR),
1340 message: "undefined variable 'A'".to_string(),
1341 source: Some("disk".to_string()),
1342 ..Default::default()
1343 },
1344 lsp::Diagnostic {
1345 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1346 severity: Some(DiagnosticSeverity::WARNING),
1347 message: "unreachable statement".to_string(),
1348 source: Some("disk".to_string()),
1349 ..Default::default()
1350 },
1351 ],
1352 });
1353
1354 buffer.next_notification(cx).await;
1355 buffer.read_with(cx, |buffer, _| {
1356 assert_eq!(
1357 buffer
1358 .snapshot()
1359 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1360 .collect::<Vec<_>>(),
1361 &[
1362 DiagnosticEntry {
1363 range: Point::new(2, 9)..Point::new(2, 12),
1364 diagnostic: Diagnostic {
1365 source: Some("disk".into()),
1366 severity: DiagnosticSeverity::WARNING,
1367 message: "unreachable statement".to_string(),
1368 is_disk_based: true,
1369 group_id: 4,
1370 is_primary: true,
1371 ..Default::default()
1372 }
1373 },
1374 DiagnosticEntry {
1375 range: Point::new(2, 9)..Point::new(2, 10),
1376 diagnostic: Diagnostic {
1377 source: Some("disk".into()),
1378 severity: DiagnosticSeverity::ERROR,
1379 message: "undefined variable 'A'".to_string(),
1380 is_disk_based: true,
1381 group_id: 3,
1382 is_primary: true,
1383 ..Default::default()
1384 },
1385 }
1386 ]
1387 );
1388 assert_eq!(
1389 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1390 [
1391 ("fn a() { ".to_string(), None),
1392 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1393 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1394 ("\n".to_string(), None),
1395 ]
1396 );
1397 assert_eq!(
1398 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1399 [
1400 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1401 ("\n".to_string(), None),
1402 ]
1403 );
1404 });
1405
1406 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1407 // changes since the last save.
1408 buffer.update(cx, |buffer, cx| {
1409 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1410 buffer.edit(
1411 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1412 None,
1413 cx,
1414 );
1415 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1416 });
1417 let change_notification_2 = fake_server
1418 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1419 .await;
1420 assert!(
1421 change_notification_2.text_document.version > change_notification_1.text_document.version
1422 );
1423
1424 // Handle out-of-order diagnostics
1425 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1426 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1427 version: Some(change_notification_2.text_document.version),
1428 diagnostics: vec![
1429 lsp::Diagnostic {
1430 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1431 severity: Some(DiagnosticSeverity::ERROR),
1432 message: "undefined variable 'BB'".to_string(),
1433 source: Some("disk".to_string()),
1434 ..Default::default()
1435 },
1436 lsp::Diagnostic {
1437 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1438 severity: Some(DiagnosticSeverity::WARNING),
1439 message: "undefined variable 'A'".to_string(),
1440 source: Some("disk".to_string()),
1441 ..Default::default()
1442 },
1443 ],
1444 });
1445
1446 buffer.next_notification(cx).await;
1447 buffer.read_with(cx, |buffer, _| {
1448 assert_eq!(
1449 buffer
1450 .snapshot()
1451 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1452 .collect::<Vec<_>>(),
1453 &[
1454 DiagnosticEntry {
1455 range: Point::new(2, 21)..Point::new(2, 22),
1456 diagnostic: Diagnostic {
1457 source: Some("disk".into()),
1458 severity: DiagnosticSeverity::WARNING,
1459 message: "undefined variable 'A'".to_string(),
1460 is_disk_based: true,
1461 group_id: 6,
1462 is_primary: true,
1463 ..Default::default()
1464 }
1465 },
1466 DiagnosticEntry {
1467 range: Point::new(3, 9)..Point::new(3, 14),
1468 diagnostic: Diagnostic {
1469 source: Some("disk".into()),
1470 severity: DiagnosticSeverity::ERROR,
1471 message: "undefined variable 'BB'".to_string(),
1472 is_disk_based: true,
1473 group_id: 5,
1474 is_primary: true,
1475 ..Default::default()
1476 },
1477 }
1478 ]
1479 );
1480 });
1481}
1482
1483#[gpui::test]
1484async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1485 init_test(cx);
1486
1487 let text = concat!(
1488 "let one = ;\n", //
1489 "let two = \n",
1490 "let three = 3;\n",
1491 );
1492
1493 let fs = FakeFs::new(cx.background());
1494 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1495
1496 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1497 let buffer = project
1498 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1499 .await
1500 .unwrap();
1501
1502 project.update(cx, |project, cx| {
1503 project
1504 .update_buffer_diagnostics(
1505 &buffer,
1506 LanguageServerId(0),
1507 None,
1508 vec![
1509 DiagnosticEntry {
1510 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1511 diagnostic: Diagnostic {
1512 severity: DiagnosticSeverity::ERROR,
1513 message: "syntax error 1".to_string(),
1514 ..Default::default()
1515 },
1516 },
1517 DiagnosticEntry {
1518 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1519 diagnostic: Diagnostic {
1520 severity: DiagnosticSeverity::ERROR,
1521 message: "syntax error 2".to_string(),
1522 ..Default::default()
1523 },
1524 },
1525 ],
1526 cx,
1527 )
1528 .unwrap();
1529 });
1530
1531 // An empty range is extended forward to include the following character.
1532 // At the end of a line, an empty range is extended backward to include
1533 // the preceding character.
1534 buffer.read_with(cx, |buffer, _| {
1535 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1536 assert_eq!(
1537 chunks
1538 .iter()
1539 .map(|(s, d)| (s.as_str(), *d))
1540 .collect::<Vec<_>>(),
1541 &[
1542 ("let one = ", None),
1543 (";", Some(DiagnosticSeverity::ERROR)),
1544 ("\nlet two =", None),
1545 (" ", Some(DiagnosticSeverity::ERROR)),
1546 ("\nlet three = 3;\n", None)
1547 ]
1548 );
1549 });
1550}
1551
1552#[gpui::test]
1553async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1554 init_test(cx);
1555
1556 let fs = FakeFs::new(cx.background());
1557 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1558 .await;
1559
1560 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1561
1562 project.update(cx, |project, cx| {
1563 project
1564 .update_diagnostic_entries(
1565 LanguageServerId(0),
1566 Path::new("/dir/a.rs").to_owned(),
1567 None,
1568 vec![DiagnosticEntry {
1569 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1570 diagnostic: Diagnostic {
1571 severity: DiagnosticSeverity::ERROR,
1572 is_primary: true,
1573 message: "syntax error a1".to_string(),
1574 ..Default::default()
1575 },
1576 }],
1577 cx,
1578 )
1579 .unwrap();
1580 project
1581 .update_diagnostic_entries(
1582 LanguageServerId(1),
1583 Path::new("/dir/a.rs").to_owned(),
1584 None,
1585 vec![DiagnosticEntry {
1586 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1587 diagnostic: Diagnostic {
1588 severity: DiagnosticSeverity::ERROR,
1589 is_primary: true,
1590 message: "syntax error b1".to_string(),
1591 ..Default::default()
1592 },
1593 }],
1594 cx,
1595 )
1596 .unwrap();
1597
1598 assert_eq!(
1599 project.diagnostic_summary(cx),
1600 DiagnosticSummary {
1601 error_count: 2,
1602 warning_count: 0,
1603 }
1604 );
1605 });
1606}
1607
1608#[gpui::test]
1609async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1610 init_test(cx);
1611
1612 let mut language = Language::new(
1613 LanguageConfig {
1614 name: "Rust".into(),
1615 path_suffixes: vec!["rs".to_string()],
1616 ..Default::default()
1617 },
1618 Some(tree_sitter_rust::language()),
1619 );
1620 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1621
1622 let text = "
1623 fn a() {
1624 f1();
1625 }
1626 fn b() {
1627 f2();
1628 }
1629 fn c() {
1630 f3();
1631 }
1632 "
1633 .unindent();
1634
1635 let fs = FakeFs::new(cx.background());
1636 fs.insert_tree(
1637 "/dir",
1638 json!({
1639 "a.rs": text.clone(),
1640 }),
1641 )
1642 .await;
1643
1644 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1645 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1646 let buffer = project
1647 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1648 .await
1649 .unwrap();
1650
1651 let mut fake_server = fake_servers.next().await.unwrap();
1652 let lsp_document_version = fake_server
1653 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1654 .await
1655 .text_document
1656 .version;
1657
1658 // Simulate editing the buffer after the language server computes some edits.
1659 buffer.update(cx, |buffer, cx| {
1660 buffer.edit(
1661 [(
1662 Point::new(0, 0)..Point::new(0, 0),
1663 "// above first function\n",
1664 )],
1665 None,
1666 cx,
1667 );
1668 buffer.edit(
1669 [(
1670 Point::new(2, 0)..Point::new(2, 0),
1671 " // inside first function\n",
1672 )],
1673 None,
1674 cx,
1675 );
1676 buffer.edit(
1677 [(
1678 Point::new(6, 4)..Point::new(6, 4),
1679 "// inside second function ",
1680 )],
1681 None,
1682 cx,
1683 );
1684
1685 assert_eq!(
1686 buffer.text(),
1687 "
1688 // above first function
1689 fn a() {
1690 // inside first function
1691 f1();
1692 }
1693 fn b() {
1694 // inside second function f2();
1695 }
1696 fn c() {
1697 f3();
1698 }
1699 "
1700 .unindent()
1701 );
1702 });
1703
1704 let edits = project
1705 .update(cx, |project, cx| {
1706 project.edits_from_lsp(
1707 &buffer,
1708 vec![
1709 // replace body of first function
1710 lsp::TextEdit {
1711 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1712 new_text: "
1713 fn a() {
1714 f10();
1715 }
1716 "
1717 .unindent(),
1718 },
1719 // edit inside second function
1720 lsp::TextEdit {
1721 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1722 new_text: "00".into(),
1723 },
1724 // edit inside third function via two distinct edits
1725 lsp::TextEdit {
1726 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1727 new_text: "4000".into(),
1728 },
1729 lsp::TextEdit {
1730 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1731 new_text: "".into(),
1732 },
1733 ],
1734 LanguageServerId(0),
1735 Some(lsp_document_version),
1736 cx,
1737 )
1738 })
1739 .await
1740 .unwrap();
1741
1742 buffer.update(cx, |buffer, cx| {
1743 for (range, new_text) in edits {
1744 buffer.edit([(range, new_text)], None, cx);
1745 }
1746 assert_eq!(
1747 buffer.text(),
1748 "
1749 // above first function
1750 fn a() {
1751 // inside first function
1752 f10();
1753 }
1754 fn b() {
1755 // inside second function f200();
1756 }
1757 fn c() {
1758 f4000();
1759 }
1760 "
1761 .unindent()
1762 );
1763 });
1764}
1765
1766#[gpui::test]
1767async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1768 init_test(cx);
1769
1770 let text = "
1771 use a::b;
1772 use a::c;
1773
1774 fn f() {
1775 b();
1776 c();
1777 }
1778 "
1779 .unindent();
1780
1781 let fs = FakeFs::new(cx.background());
1782 fs.insert_tree(
1783 "/dir",
1784 json!({
1785 "a.rs": text.clone(),
1786 }),
1787 )
1788 .await;
1789
1790 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1791 let buffer = project
1792 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1793 .await
1794 .unwrap();
1795
1796 // Simulate the language server sending us a small edit in the form of a very large diff.
1797 // Rust-analyzer does this when performing a merge-imports code action.
1798 let edits = project
1799 .update(cx, |project, cx| {
1800 project.edits_from_lsp(
1801 &buffer,
1802 [
1803 // Replace the first use statement without editing the semicolon.
1804 lsp::TextEdit {
1805 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1806 new_text: "a::{b, c}".into(),
1807 },
1808 // Reinsert the remainder of the file between the semicolon and the final
1809 // newline of the file.
1810 lsp::TextEdit {
1811 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1812 new_text: "\n\n".into(),
1813 },
1814 lsp::TextEdit {
1815 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1816 new_text: "
1817 fn f() {
1818 b();
1819 c();
1820 }"
1821 .unindent(),
1822 },
1823 // Delete everything after the first newline of the file.
1824 lsp::TextEdit {
1825 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1826 new_text: "".into(),
1827 },
1828 ],
1829 LanguageServerId(0),
1830 None,
1831 cx,
1832 )
1833 })
1834 .await
1835 .unwrap();
1836
1837 buffer.update(cx, |buffer, cx| {
1838 let edits = edits
1839 .into_iter()
1840 .map(|(range, text)| {
1841 (
1842 range.start.to_point(buffer)..range.end.to_point(buffer),
1843 text,
1844 )
1845 })
1846 .collect::<Vec<_>>();
1847
1848 assert_eq!(
1849 edits,
1850 [
1851 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1852 (Point::new(1, 0)..Point::new(2, 0), "".into())
1853 ]
1854 );
1855
1856 for (range, new_text) in edits {
1857 buffer.edit([(range, new_text)], None, cx);
1858 }
1859 assert_eq!(
1860 buffer.text(),
1861 "
1862 use a::{b, c};
1863
1864 fn f() {
1865 b();
1866 c();
1867 }
1868 "
1869 .unindent()
1870 );
1871 });
1872}
1873
1874#[gpui::test]
1875async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1876 init_test(cx);
1877
1878 let text = "
1879 use a::b;
1880 use a::c;
1881
1882 fn f() {
1883 b();
1884 c();
1885 }
1886 "
1887 .unindent();
1888
1889 let fs = FakeFs::new(cx.background());
1890 fs.insert_tree(
1891 "/dir",
1892 json!({
1893 "a.rs": text.clone(),
1894 }),
1895 )
1896 .await;
1897
1898 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1899 let buffer = project
1900 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1901 .await
1902 .unwrap();
1903
1904 // Simulate the language server sending us edits in a non-ordered fashion,
1905 // with ranges sometimes being inverted or pointing to invalid locations.
1906 let edits = project
1907 .update(cx, |project, cx| {
1908 project.edits_from_lsp(
1909 &buffer,
1910 [
1911 lsp::TextEdit {
1912 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1913 new_text: "\n\n".into(),
1914 },
1915 lsp::TextEdit {
1916 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1917 new_text: "a::{b, c}".into(),
1918 },
1919 lsp::TextEdit {
1920 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1921 new_text: "".into(),
1922 },
1923 lsp::TextEdit {
1924 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1925 new_text: "
1926 fn f() {
1927 b();
1928 c();
1929 }"
1930 .unindent(),
1931 },
1932 ],
1933 LanguageServerId(0),
1934 None,
1935 cx,
1936 )
1937 })
1938 .await
1939 .unwrap();
1940
1941 buffer.update(cx, |buffer, cx| {
1942 let edits = edits
1943 .into_iter()
1944 .map(|(range, text)| {
1945 (
1946 range.start.to_point(buffer)..range.end.to_point(buffer),
1947 text,
1948 )
1949 })
1950 .collect::<Vec<_>>();
1951
1952 assert_eq!(
1953 edits,
1954 [
1955 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1956 (Point::new(1, 0)..Point::new(2, 0), "".into())
1957 ]
1958 );
1959
1960 for (range, new_text) in edits {
1961 buffer.edit([(range, new_text)], None, cx);
1962 }
1963 assert_eq!(
1964 buffer.text(),
1965 "
1966 use a::{b, c};
1967
1968 fn f() {
1969 b();
1970 c();
1971 }
1972 "
1973 .unindent()
1974 );
1975 });
1976}
1977
1978fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1979 buffer: &Buffer,
1980 range: Range<T>,
1981) -> Vec<(String, Option<DiagnosticSeverity>)> {
1982 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1983 for chunk in buffer.snapshot().chunks(range, true) {
1984 if chunks.last().map_or(false, |prev_chunk| {
1985 prev_chunk.1 == chunk.diagnostic_severity
1986 }) {
1987 chunks.last_mut().unwrap().0.push_str(chunk.text);
1988 } else {
1989 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1990 }
1991 }
1992 chunks
1993}
1994
1995#[gpui::test(iterations = 10)]
1996async fn test_definition(cx: &mut gpui::TestAppContext) {
1997 init_test(cx);
1998
1999 let mut language = Language::new(
2000 LanguageConfig {
2001 name: "Rust".into(),
2002 path_suffixes: vec!["rs".to_string()],
2003 ..Default::default()
2004 },
2005 Some(tree_sitter_rust::language()),
2006 );
2007 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2008
2009 let fs = FakeFs::new(cx.background());
2010 fs.insert_tree(
2011 "/dir",
2012 json!({
2013 "a.rs": "const fn a() { A }",
2014 "b.rs": "const y: i32 = crate::a()",
2015 }),
2016 )
2017 .await;
2018
2019 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2020 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2021
2022 let buffer = project
2023 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2024 .await
2025 .unwrap();
2026
2027 let fake_server = fake_servers.next().await.unwrap();
2028 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2029 let params = params.text_document_position_params;
2030 assert_eq!(
2031 params.text_document.uri.to_file_path().unwrap(),
2032 Path::new("/dir/b.rs"),
2033 );
2034 assert_eq!(params.position, lsp::Position::new(0, 22));
2035
2036 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2037 lsp::Location::new(
2038 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2039 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2040 ),
2041 )))
2042 });
2043
2044 let mut definitions = project
2045 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2046 .await
2047 .unwrap();
2048
    // Assert that no new language server was started.
2050 cx.foreground().run_until_parked();
2051 assert!(fake_servers.try_next().is_err());
2052
2053 assert_eq!(definitions.len(), 1);
2054 let definition = definitions.pop().unwrap();
2055 cx.update(|cx| {
2056 let target_buffer = definition.target.buffer.read(cx);
2057 assert_eq!(
2058 target_buffer
2059 .file()
2060 .unwrap()
2061 .as_local()
2062 .unwrap()
2063 .abs_path(cx),
2064 Path::new("/dir/a.rs"),
2065 );
2066 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2067 assert_eq!(
2068 list_worktrees(&project, cx),
2069 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2070 );
2071
2072 drop(definition);
2073 });
2074 cx.read(|cx| {
2075 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2076 });
2077
2078 fn list_worktrees<'a>(
2079 project: &'a ModelHandle<Project>,
2080 cx: &'a AppContext,
2081 ) -> Vec<(&'a Path, bool)> {
2082 project
2083 .read(cx)
2084 .worktrees(cx)
2085 .map(|worktree| {
2086 let worktree = worktree.read(cx);
2087 (
2088 worktree.as_local().unwrap().abs_path().as_ref(),
2089 worktree.is_visible(),
2090 )
2091 })
2092 .collect::<Vec<_>>()
2093 }
2094}
2095
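// When the server's completion items don't specify an edit range, the old range
// should be inferred from the text preceding the cursor (a word like `fqn`, or a
// path segment like `cmp` inside a string literal).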
2096#[gpui::test]
2097async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2098 init_test(cx);
2099
2100 let mut language = Language::new(
2101 LanguageConfig {
2102 name: "TypeScript".into(),
2103 path_suffixes: vec!["ts".to_string()],
2104 ..Default::default()
2105 },
2106 Some(tree_sitter_typescript::language_typescript()),
2107 );
2108 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2109
2110 let fs = FakeFs::new(cx.background());
2111 fs.insert_tree(
2112 "/dir",
2113 json!({
2114 "a.ts": "",
2115 }),
2116 )
2117 .await;
2118
2119 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2120 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2121 let buffer = project
2122 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2123 .await
2124 .unwrap();
2125
2126 let fake_server = fake_language_servers.next().await.unwrap();
2127
2128 let text = "let a = b.fqn";
2129 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2130 let completions = project.update(cx, |project, cx| {
2131 project.completions(&buffer, text.len(), cx)
2132 });
2133
2134 fake_server
2135 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2136 Ok(Some(lsp::CompletionResponse::Array(vec![
2137 lsp::CompletionItem {
2138 label: "fullyQualifiedName?".into(),
2139 insert_text: Some("fullyQualifiedName".into()),
2140 ..Default::default()
2141 },
2142 ])))
2143 })
2144 .next()
2145 .await;
2146 let completions = completions.await.unwrap();
2147 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2148 assert_eq!(completions.len(), 1);
2149 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2150 assert_eq!(
2151 completions[0].old_range.to_offset(&snapshot),
2152 text.len() - 3..text.len()
2153 );
2154
2155 let text = "let a = \"atoms/cmp\"";
2156 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2157 let completions = project.update(cx, |project, cx| {
2158 project.completions(&buffer, text.len() - 1, cx)
2159 });
2160
2161 fake_server
2162 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2163 Ok(Some(lsp::CompletionResponse::Array(vec![
2164 lsp::CompletionItem {
2165 label: "component".into(),
2166 ..Default::default()
2167 },
2168 ])))
2169 })
2170 .next()
2171 .await;
2172 let completions = completions.await.unwrap();
2173 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2174 assert_eq!(completions.len(), 1);
2175 assert_eq!(completions[0].new_text, "component");
2176 assert_eq!(
2177 completions[0].old_range.to_offset(&snapshot),
2178 text.len() - 4..text.len() - 1
2179 );
2180}
2181
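// Carriage returns in a completion's insert text should be normalized to `\n`
// before the completion is applied.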
2182#[gpui::test]
2183async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2184 init_test(cx);
2185
2186 let mut language = Language::new(
2187 LanguageConfig {
2188 name: "TypeScript".into(),
2189 path_suffixes: vec!["ts".to_string()],
2190 ..Default::default()
2191 },
2192 Some(tree_sitter_typescript::language_typescript()),
2193 );
2194 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2195
2196 let fs = FakeFs::new(cx.background());
2197 fs.insert_tree(
2198 "/dir",
2199 json!({
2200 "a.ts": "",
2201 }),
2202 )
2203 .await;
2204
2205 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2206 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2207 let buffer = project
2208 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2209 .await
2210 .unwrap();
2211
2212 let fake_server = fake_language_servers.next().await.unwrap();
2213
2214 let text = "let a = b.fqn";
2215 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2216 let completions = project.update(cx, |project, cx| {
2217 project.completions(&buffer, text.len(), cx)
2218 });
2219
2220 fake_server
2221 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2222 Ok(Some(lsp::CompletionResponse::Array(vec![
2223 lsp::CompletionItem {
2224 label: "fullyQualifiedName?".into(),
2225 insert_text: Some("fully\rQualified\r\nName".into()),
2226 ..Default::default()
2227 },
2228 ])))
2229 })
2230 .next()
2231 .await;
2232 let completions = completions.await.unwrap();
2233 assert_eq!(completions.len(), 1);
2234 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2235}
2236
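// Exercises code actions that carry a command instead of edits: the action is
// resolved, its command is executed, and the workspace edit sent back by the
// server is applied to the buffer as a single undoable transaction.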
2237#[gpui::test(iterations = 10)]
2238async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2239 init_test(cx);
2240
2241 let mut language = Language::new(
2242 LanguageConfig {
2243 name: "TypeScript".into(),
2244 path_suffixes: vec!["ts".to_string()],
2245 ..Default::default()
2246 },
2247 None,
2248 );
2249 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2250
2251 let fs = FakeFs::new(cx.background());
2252 fs.insert_tree(
2253 "/dir",
2254 json!({
2255 "a.ts": "a",
2256 }),
2257 )
2258 .await;
2259
2260 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2261 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2262 let buffer = project
2263 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2264 .await
2265 .unwrap();
2266
2267 let fake_server = fake_language_servers.next().await.unwrap();
2268
    // The language server returns code actions that contain commands rather than edits.
2270 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2271 fake_server
2272 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2273 Ok(Some(vec![
2274 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2275 title: "The code action".into(),
2276 command: Some(lsp::Command {
2277 title: "The command".into(),
2278 command: "_the/command".into(),
2279 arguments: Some(vec![json!("the-argument")]),
2280 }),
2281 ..Default::default()
2282 }),
2283 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2284 title: "two".into(),
2285 ..Default::default()
2286 }),
2287 ]))
2288 })
2289 .next()
2290 .await;
2291
2292 let action = actions.await.unwrap()[0].clone();
2293 let apply = project.update(cx, |project, cx| {
2294 project.apply_code_action(buffer.clone(), action, true, cx)
2295 });
2296
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2299 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2300 |action, _| async move { Ok(action) },
2301 );
2302
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2305 fake_server
2306 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2307 let fake = fake_server.clone();
2308 move |params, _| {
2309 assert_eq!(params.command, "_the/command");
2310 let fake = fake.clone();
2311 async move {
2312 fake.server
2313 .request::<lsp::request::ApplyWorkspaceEdit>(
2314 lsp::ApplyWorkspaceEditParams {
2315 label: None,
2316 edit: lsp::WorkspaceEdit {
2317 changes: Some(
2318 [(
2319 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2320 vec![lsp::TextEdit {
2321 range: lsp::Range::new(
2322 lsp::Position::new(0, 0),
2323 lsp::Position::new(0, 0),
2324 ),
2325 new_text: "X".into(),
2326 }],
2327 )]
2328 .into_iter()
2329 .collect(),
2330 ),
2331 ..Default::default()
2332 },
2333 },
2334 )
2335 .await
2336 .unwrap();
2337 Ok(Some(json!(null)))
2338 }
2339 }
2340 })
2341 .next()
2342 .await;
2343
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2346 let transaction = apply.await.unwrap();
2347 assert!(transaction.0.contains_key(&buffer));
2348 buffer.update(cx, |buffer, cx| {
2349 assert_eq!(buffer.text(), "Xa");
2350 buffer.undo(cx);
2351 assert_eq!(buffer.text(), "a");
2352 });
2353}
2354
2355#[gpui::test(iterations = 10)]
2356async fn test_save_file(cx: &mut gpui::TestAppContext) {
2357 init_test(cx);
2358
2359 let fs = FakeFs::new(cx.background());
2360 fs.insert_tree(
2361 "/dir",
2362 json!({
2363 "file1": "the old contents",
2364 }),
2365 )
2366 .await;
2367
2368 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2369 let buffer = project
2370 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2371 .await
2372 .unwrap();
2373 buffer.update(cx, |buffer, cx| {
2374 assert_eq!(buffer.text(), "the old contents");
2375 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2376 });
2377
2378 project
2379 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2380 .await
2381 .unwrap();
2382
2383 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2384 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2385}
2386
2387#[gpui::test]
2388async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2389 init_test(cx);
2390
2391 let fs = FakeFs::new(cx.background());
2392 fs.insert_tree(
2393 "/dir",
2394 json!({
2395 "file1": "the old contents",
2396 }),
2397 )
2398 .await;
2399
2400 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2401 let buffer = project
2402 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2403 .await
2404 .unwrap();
2405 buffer.update(cx, |buffer, cx| {
2406 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2407 });
2408
2409 project
2410 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2411 .await
2412 .unwrap();
2413
2414 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2415 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2416}
2417
2418#[gpui::test]
2419async fn test_save_as(cx: &mut gpui::TestAppContext) {
2420 init_test(cx);
2421
2422 let fs = FakeFs::new(cx.background());
2423 fs.insert_tree("/dir", json!({})).await;
2424
2425 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2426
2427 let languages = project.read_with(cx, |project, _| project.languages().clone());
2428 languages.register(
2429 "/some/path",
2430 LanguageConfig {
2431 name: "Rust".into(),
2432 path_suffixes: vec!["rs".into()],
2433 ..Default::default()
2434 },
2435 tree_sitter_rust::language(),
2436 vec![],
2437 |_| Default::default(),
2438 );
2439
2440 let buffer = project.update(cx, |project, cx| {
2441 project.create_buffer("", None, cx).unwrap()
2442 });
2443 buffer.update(cx, |buffer, cx| {
2444 buffer.edit([(0..0, "abc")], None, cx);
2445 assert!(buffer.is_dirty());
2446 assert!(!buffer.has_conflict());
2447 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2448 });
2449 project
2450 .update(cx, |project, cx| {
2451 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2452 })
2453 .await
2454 .unwrap();
2455 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2456
2457 cx.foreground().run_until_parked();
2458 buffer.read_with(cx, |buffer, cx| {
2459 assert_eq!(
2460 buffer.file().unwrap().full_path(cx),
2461 Path::new("dir/file1.rs")
2462 );
2463 assert!(!buffer.is_dirty());
2464 assert!(!buffer.has_conflict());
2465 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2466 });
2467
2468 let opened_buffer = project
2469 .update(cx, |project, cx| {
2470 project.open_local_buffer("/dir/file1.rs", cx)
2471 })
2472 .await
2473 .unwrap();
2474 assert_eq!(opened_buffer, buffer);
2475}
2476
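// Renames and deletions on disk should preserve entry ids and keep open buffers
// pointing at their new paths, and the observed update stream should bring a
// remote copy of the worktree into the same state as the local one.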
2477#[gpui::test(retries = 5)]
2478async fn test_rescan_and_remote_updates(
2479 deterministic: Arc<Deterministic>,
2480 cx: &mut gpui::TestAppContext,
2481) {
2482 init_test(cx);
2483 cx.foreground().allow_parking();
2484
2485 let dir = temp_tree(json!({
2486 "a": {
2487 "file1": "",
2488 "file2": "",
2489 "file3": "",
2490 },
2491 "b": {
2492 "c": {
2493 "file4": "",
2494 "file5": "",
2495 }
2496 }
2497 }));
2498
2499 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2500 let rpc = project.read_with(cx, |p, _| p.client.clone());
2501
2502 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2503 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2504 async move { buffer.await.unwrap() }
2505 };
2506 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2507 project.read_with(cx, |project, cx| {
2508 let tree = project.worktrees(cx).next().unwrap();
2509 tree.read(cx)
2510 .entry_for_path(path)
2511 .unwrap_or_else(|| panic!("no entry for path {}", path))
2512 .id
2513 })
2514 };
2515
2516 let buffer2 = buffer_for_path("a/file2", cx).await;
2517 let buffer3 = buffer_for_path("a/file3", cx).await;
2518 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2519 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2520
2521 let file2_id = id_for_path("a/file2", cx);
2522 let file3_id = id_for_path("a/file3", cx);
2523 let file4_id = id_for_path("b/c/file4", cx);
2524
2525 // Create a remote copy of this worktree.
2526 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2527
2528 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2529
2530 let updates = Arc::new(Mutex::new(Vec::new()));
2531 tree.update(cx, |tree, cx| {
2532 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2533 let updates = updates.clone();
2534 move |update| {
2535 updates.lock().push(update);
2536 async { true }
2537 }
2538 });
2539 });
2540
2541 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2542 deterministic.run_until_parked();
2543
2544 cx.read(|cx| {
2545 assert!(!buffer2.read(cx).is_dirty());
2546 assert!(!buffer3.read(cx).is_dirty());
2547 assert!(!buffer4.read(cx).is_dirty());
2548 assert!(!buffer5.read(cx).is_dirty());
2549 });
2550
2551 // Rename and delete files and directories.
2552 tree.flush_fs_events(cx).await;
2553 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2554 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2555 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2556 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2557 tree.flush_fs_events(cx).await;
2558
2559 let expected_paths = vec![
2560 "a",
2561 "a/file1",
2562 "a/file2.new",
2563 "b",
2564 "d",
2565 "d/file3",
2566 "d/file4",
2567 ];
2568
2569 cx.read(|app| {
2570 assert_eq!(
2571 tree.read(app)
2572 .paths()
2573 .map(|p| p.to_str().unwrap())
2574 .collect::<Vec<_>>(),
2575 expected_paths
2576 );
2577
2578 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2579 assert_eq!(id_for_path("d/file3", cx), file3_id);
2580 assert_eq!(id_for_path("d/file4", cx), file4_id);
2581
2582 assert_eq!(
2583 buffer2.read(app).file().unwrap().path().as_ref(),
2584 Path::new("a/file2.new")
2585 );
2586 assert_eq!(
2587 buffer3.read(app).file().unwrap().path().as_ref(),
2588 Path::new("d/file3")
2589 );
2590 assert_eq!(
2591 buffer4.read(app).file().unwrap().path().as_ref(),
2592 Path::new("d/file4")
2593 );
2594 assert_eq!(
2595 buffer5.read(app).file().unwrap().path().as_ref(),
2596 Path::new("b/c/file5")
2597 );
2598
2599 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2600 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2601 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2602 assert!(buffer5.read(app).file().unwrap().is_deleted());
2603 });
2604
2605 // Update the remote worktree. Check that it becomes consistent with the
2606 // local worktree.
2607 deterministic.run_until_parked();
2608 remote.update(cx, |remote, _| {
2609 for update in updates.lock().drain(..) {
2610 remote.as_remote_mut().unwrap().update_from_remote(update);
2611 }
2612 });
2613 deterministic.run_until_parked();
2614 remote.read_with(cx, |remote, _| {
2615 assert_eq!(
2616 remote
2617 .paths()
2618 .map(|p| p.to_str().unwrap())
2619 .collect::<Vec<_>>(),
2620 expected_paths
2621 );
2622 });
2623}
2624
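// Renaming a directory should preserve the ids of the entries inside it, and
// buffers open under the old path should remain clean.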
2625#[gpui::test(iterations = 10)]
2626async fn test_buffer_identity_across_renames(
2627 deterministic: Arc<Deterministic>,
2628 cx: &mut gpui::TestAppContext,
2629) {
2630 init_test(cx);
2631
2632 let fs = FakeFs::new(cx.background());
2633 fs.insert_tree(
2634 "/dir",
2635 json!({
2636 "a": {
2637 "file1": "",
2638 }
2639 }),
2640 )
2641 .await;
2642
2643 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2644 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2645 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2646
2647 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2648 project.read_with(cx, |project, cx| {
2649 let tree = project.worktrees(cx).next().unwrap();
2650 tree.read(cx)
2651 .entry_for_path(path)
2652 .unwrap_or_else(|| panic!("no entry for path {}", path))
2653 .id
2654 })
2655 };
2656
2657 let dir_id = id_for_path("a", cx);
2658 let file_id = id_for_path("a/file1", cx);
2659 let buffer = project
2660 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2661 .await
2662 .unwrap();
2663 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2664
2665 project
2666 .update(cx, |project, cx| {
2667 project.rename_entry(dir_id, Path::new("b"), cx)
2668 })
2669 .unwrap()
2670 .await
2671 .unwrap();
2672 deterministic.run_until_parked();
2673 assert_eq!(id_for_path("b", cx), dir_id);
2674 assert_eq!(id_for_path("b/file1", cx), file_id);
2675 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2676}
2677
2678#[gpui::test]
2679async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2680 init_test(cx);
2681
2682 let fs = FakeFs::new(cx.background());
2683 fs.insert_tree(
2684 "/dir",
2685 json!({
2686 "a.txt": "a-contents",
2687 "b.txt": "b-contents",
2688 }),
2689 )
2690 .await;
2691
2692 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2693
2694 // Spawn multiple tasks to open paths, repeating some paths.
2695 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2696 (
2697 p.open_local_buffer("/dir/a.txt", cx),
2698 p.open_local_buffer("/dir/b.txt", cx),
2699 p.open_local_buffer("/dir/a.txt", cx),
2700 )
2701 });
2702
2703 let buffer_a_1 = buffer_a_1.await.unwrap();
2704 let buffer_a_2 = buffer_a_2.await.unwrap();
2705 let buffer_b = buffer_b.await.unwrap();
2706 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2707 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2708
2709 // There is only one buffer per path.
2710 let buffer_a_id = buffer_a_1.id();
2711 assert_eq!(buffer_a_2.id(), buffer_a_id);
2712
2713 // Open the same path again while it is still open.
2714 drop(buffer_a_1);
2715 let buffer_a_3 = project
2716 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2717 .await
2718 .unwrap();
2719
2720 // There's still only one buffer per path.
2721 assert_eq!(buffer_a_3.id(), buffer_a_id);
2722}
2723
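// Tracks the buffer's dirty state through edits, saves, reverts, and file
// deletions, asserting the exact sequence of events emitted at each step.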
2724#[gpui::test]
2725async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2726 init_test(cx);
2727
2728 let fs = FakeFs::new(cx.background());
2729 fs.insert_tree(
2730 "/dir",
2731 json!({
2732 "file1": "abc",
2733 "file2": "def",
2734 "file3": "ghi",
2735 }),
2736 )
2737 .await;
2738
2739 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2740
2741 let buffer1 = project
2742 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2743 .await
2744 .unwrap();
2745 let events = Rc::new(RefCell::new(Vec::new()));
2746
2747 // initially, the buffer isn't dirty.
2748 buffer1.update(cx, |buffer, cx| {
2749 cx.subscribe(&buffer1, {
2750 let events = events.clone();
2751 move |_, _, event, _| match event {
2752 BufferEvent::Operation(_) => {}
2753 _ => events.borrow_mut().push(event.clone()),
2754 }
2755 })
2756 .detach();
2757
2758 assert!(!buffer.is_dirty());
2759 assert!(events.borrow().is_empty());
2760
2761 buffer.edit([(1..2, "")], None, cx);
2762 });
2763
    // after the first edit, the buffer is dirty, and emits a `DirtyChanged` event.
2765 buffer1.update(cx, |buffer, cx| {
2766 assert!(buffer.text() == "ac");
2767 assert!(buffer.is_dirty());
2768 assert_eq!(
2769 *events.borrow(),
2770 &[language::Event::Edited, language::Event::DirtyChanged]
2771 );
2772 events.borrow_mut().clear();
2773 buffer.did_save(
2774 buffer.version(),
2775 buffer.as_rope().fingerprint(),
2776 buffer.file().unwrap().mtime(),
2777 cx,
2778 );
2779 });
2780
2781 // after saving, the buffer is not dirty, and emits a saved event.
2782 buffer1.update(cx, |buffer, cx| {
2783 assert!(!buffer.is_dirty());
2784 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2785 events.borrow_mut().clear();
2786
2787 buffer.edit([(1..1, "B")], None, cx);
2788 buffer.edit([(2..2, "D")], None, cx);
2789 });
2790
2791 // after editing again, the buffer is dirty, and emits another dirty event.
2792 buffer1.update(cx, |buffer, cx| {
2793 assert!(buffer.text() == "aBDc");
2794 assert!(buffer.is_dirty());
2795 assert_eq!(
2796 *events.borrow(),
2797 &[
2798 language::Event::Edited,
2799 language::Event::DirtyChanged,
2800 language::Event::Edited,
2801 ],
2802 );
2803 events.borrow_mut().clear();
2804
2805 // After restoring the buffer to its previously-saved state,
2806 // the buffer is not considered dirty anymore.
2807 buffer.edit([(1..3, "")], None, cx);
2808 assert!(buffer.text() == "ac");
2809 assert!(!buffer.is_dirty());
2810 });
2811
2812 assert_eq!(
2813 *events.borrow(),
2814 &[language::Event::Edited, language::Event::DirtyChanged]
2815 );
2816
2817 // When a file is deleted, the buffer is considered dirty.
2818 let events = Rc::new(RefCell::new(Vec::new()));
2819 let buffer2 = project
2820 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2821 .await
2822 .unwrap();
2823 buffer2.update(cx, |_, cx| {
2824 cx.subscribe(&buffer2, {
2825 let events = events.clone();
2826 move |_, _, event, _| events.borrow_mut().push(event.clone())
2827 })
2828 .detach();
2829 });
2830
2831 fs.remove_file("/dir/file2".as_ref(), Default::default())
2832 .await
2833 .unwrap();
2834 cx.foreground().run_until_parked();
2835 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2836 assert_eq!(
2837 *events.borrow(),
2838 &[
2839 language::Event::DirtyChanged,
2840 language::Event::FileHandleChanged
2841 ]
2842 );
2843
    // When a file is already dirty at the time it's deleted, we don't emit
    // another `DirtyChanged` event.
2845 let events = Rc::new(RefCell::new(Vec::new()));
2846 let buffer3 = project
2847 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2848 .await
2849 .unwrap();
2850 buffer3.update(cx, |_, cx| {
2851 cx.subscribe(&buffer3, {
2852 let events = events.clone();
2853 move |_, _, event, _| events.borrow_mut().push(event.clone())
2854 })
2855 .detach();
2856 });
2857
2858 buffer3.update(cx, |buffer, cx| {
2859 buffer.edit([(0..0, "x")], None, cx);
2860 });
2861 events.borrow_mut().clear();
2862 fs.remove_file("/dir/file3".as_ref(), Default::default())
2863 .await
2864 .unwrap();
2865 cx.foreground().run_until_parked();
2866 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2867 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2868}
2869
2870#[gpui::test]
2871async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2872 init_test(cx);
2873
2874 let initial_contents = "aaa\nbbbbb\nc\n";
2875 let fs = FakeFs::new(cx.background());
2876 fs.insert_tree(
2877 "/dir",
2878 json!({
2879 "the-file": initial_contents,
2880 }),
2881 )
2882 .await;
2883 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2884 let buffer = project
2885 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2886 .await
2887 .unwrap();
2888
2889 let anchors = (0..3)
2890 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2891 .collect::<Vec<_>>();
2892
2893 // Change the file on disk, adding two new lines of text, and removing
2894 // one line.
2895 buffer.read_with(cx, |buffer, _| {
2896 assert!(!buffer.is_dirty());
2897 assert!(!buffer.has_conflict());
2898 });
2899 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2900 fs.save(
2901 "/dir/the-file".as_ref(),
2902 &new_contents.into(),
2903 LineEnding::Unix,
2904 )
2905 .await
2906 .unwrap();
2907
2908 // Because the buffer was not modified, it is reloaded from disk. Its
2909 // contents are edited according to the diff between the old and new
2910 // file contents.
2911 cx.foreground().run_until_parked();
2912 buffer.update(cx, |buffer, _| {
2913 assert_eq!(buffer.text(), new_contents);
2914 assert!(!buffer.is_dirty());
2915 assert!(!buffer.has_conflict());
2916
2917 let anchor_positions = anchors
2918 .iter()
2919 .map(|anchor| anchor.to_point(&*buffer))
2920 .collect::<Vec<_>>();
2921 assert_eq!(
2922 anchor_positions,
2923 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2924 );
2925 });
2926
2927 // Modify the buffer
2928 buffer.update(cx, |buffer, cx| {
2929 buffer.edit([(0..0, " ")], None, cx);
2930 assert!(buffer.is_dirty());
2931 assert!(!buffer.has_conflict());
2932 });
2933
2934 // Change the file on disk again, adding blank lines to the beginning.
2935 fs.save(
2936 "/dir/the-file".as_ref(),
2937 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2938 LineEnding::Unix,
2939 )
2940 .await
2941 .unwrap();
2942
2943 // Because the buffer is modified, it doesn't reload from disk, but is
2944 // marked as having a conflict.
2945 cx.foreground().run_until_parked();
2946 buffer.read_with(cx, |buffer, _| {
2947 assert!(buffer.has_conflict());
2948 });
2949}
2950
2951#[gpui::test]
2952async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2953 init_test(cx);
2954
2955 let fs = FakeFs::new(cx.background());
2956 fs.insert_tree(
2957 "/dir",
2958 json!({
2959 "file1": "a\nb\nc\n",
2960 "file2": "one\r\ntwo\r\nthree\r\n",
2961 }),
2962 )
2963 .await;
2964
2965 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2966 let buffer1 = project
2967 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2968 .await
2969 .unwrap();
2970 let buffer2 = project
2971 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2972 .await
2973 .unwrap();
2974
2975 buffer1.read_with(cx, |buffer, _| {
2976 assert_eq!(buffer.text(), "a\nb\nc\n");
2977 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2978 });
2979 buffer2.read_with(cx, |buffer, _| {
2980 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2981 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2982 });
2983
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2986 fs.save(
2987 "/dir/file1".as_ref(),
2988 &"aaa\nb\nc\n".into(),
2989 LineEnding::Windows,
2990 )
2991 .await
2992 .unwrap();
2993 cx.foreground().run_until_parked();
2994 buffer1.read_with(cx, |buffer, _| {
2995 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2996 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2997 });
2998
    // Save a file with Windows line endings. The file is written correctly.
3000 buffer2.update(cx, |buffer, cx| {
3001 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3002 });
3003 project
3004 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3005 .await
3006 .unwrap();
3007 assert_eq!(
3008 fs.load("/dir/file2".as_ref()).await.unwrap(),
3009 "one\r\ntwo\r\nthree\r\nfour\r\n",
3010 );
3011}
3012
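// Diagnostics published with related information should be grouped: a primary
// diagnostic and its hints share a group id, and each group can be queried on
// its own via `diagnostic_group`.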
3013#[gpui::test]
3014async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3015 init_test(cx);
3016
3017 let fs = FakeFs::new(cx.background());
3018 fs.insert_tree(
3019 "/the-dir",
3020 json!({
3021 "a.rs": "
3022 fn foo(mut v: Vec<usize>) {
3023 for x in &v {
3024 v.push(1);
3025 }
3026 }
3027 "
3028 .unindent(),
3029 }),
3030 )
3031 .await;
3032
3033 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3034 let buffer = project
3035 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3036 .await
3037 .unwrap();
3038
3039 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3040 let message = lsp::PublishDiagnosticsParams {
3041 uri: buffer_uri.clone(),
3042 diagnostics: vec![
3043 lsp::Diagnostic {
3044 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3045 severity: Some(DiagnosticSeverity::WARNING),
3046 message: "error 1".to_string(),
3047 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3048 location: lsp::Location {
3049 uri: buffer_uri.clone(),
3050 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3051 },
3052 message: "error 1 hint 1".to_string(),
3053 }]),
3054 ..Default::default()
3055 },
3056 lsp::Diagnostic {
3057 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3058 severity: Some(DiagnosticSeverity::HINT),
3059 message: "error 1 hint 1".to_string(),
3060 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3061 location: lsp::Location {
3062 uri: buffer_uri.clone(),
3063 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3064 },
3065 message: "original diagnostic".to_string(),
3066 }]),
3067 ..Default::default()
3068 },
3069 lsp::Diagnostic {
3070 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3071 severity: Some(DiagnosticSeverity::ERROR),
3072 message: "error 2".to_string(),
3073 related_information: Some(vec![
3074 lsp::DiagnosticRelatedInformation {
3075 location: lsp::Location {
3076 uri: buffer_uri.clone(),
3077 range: lsp::Range::new(
3078 lsp::Position::new(1, 13),
3079 lsp::Position::new(1, 15),
3080 ),
3081 },
3082 message: "error 2 hint 1".to_string(),
3083 },
3084 lsp::DiagnosticRelatedInformation {
3085 location: lsp::Location {
3086 uri: buffer_uri.clone(),
3087 range: lsp::Range::new(
3088 lsp::Position::new(1, 13),
3089 lsp::Position::new(1, 15),
3090 ),
3091 },
3092 message: "error 2 hint 2".to_string(),
3093 },
3094 ]),
3095 ..Default::default()
3096 },
3097 lsp::Diagnostic {
3098 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3099 severity: Some(DiagnosticSeverity::HINT),
3100 message: "error 2 hint 1".to_string(),
3101 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3102 location: lsp::Location {
3103 uri: buffer_uri.clone(),
3104 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3105 },
3106 message: "original diagnostic".to_string(),
3107 }]),
3108 ..Default::default()
3109 },
3110 lsp::Diagnostic {
3111 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3112 severity: Some(DiagnosticSeverity::HINT),
3113 message: "error 2 hint 2".to_string(),
3114 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3115 location: lsp::Location {
3116 uri: buffer_uri,
3117 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3118 },
3119 message: "original diagnostic".to_string(),
3120 }]),
3121 ..Default::default()
3122 },
3123 ],
3124 version: None,
3125 };
3126
3127 project
3128 .update(cx, |p, cx| {
3129 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3130 })
3131 .unwrap();
3132 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3133
3134 assert_eq!(
3135 buffer
3136 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3137 .collect::<Vec<_>>(),
3138 &[
3139 DiagnosticEntry {
3140 range: Point::new(1, 8)..Point::new(1, 9),
3141 diagnostic: Diagnostic {
3142 severity: DiagnosticSeverity::WARNING,
3143 message: "error 1".to_string(),
3144 group_id: 1,
3145 is_primary: true,
3146 ..Default::default()
3147 }
3148 },
3149 DiagnosticEntry {
3150 range: Point::new(1, 8)..Point::new(1, 9),
3151 diagnostic: Diagnostic {
3152 severity: DiagnosticSeverity::HINT,
3153 message: "error 1 hint 1".to_string(),
3154 group_id: 1,
3155 is_primary: false,
3156 ..Default::default()
3157 }
3158 },
3159 DiagnosticEntry {
3160 range: Point::new(1, 13)..Point::new(1, 15),
3161 diagnostic: Diagnostic {
3162 severity: DiagnosticSeverity::HINT,
3163 message: "error 2 hint 1".to_string(),
3164 group_id: 0,
3165 is_primary: false,
3166 ..Default::default()
3167 }
3168 },
3169 DiagnosticEntry {
3170 range: Point::new(1, 13)..Point::new(1, 15),
3171 diagnostic: Diagnostic {
3172 severity: DiagnosticSeverity::HINT,
3173 message: "error 2 hint 2".to_string(),
3174 group_id: 0,
3175 is_primary: false,
3176 ..Default::default()
3177 }
3178 },
3179 DiagnosticEntry {
3180 range: Point::new(2, 8)..Point::new(2, 17),
3181 diagnostic: Diagnostic {
3182 severity: DiagnosticSeverity::ERROR,
3183 message: "error 2".to_string(),
3184 group_id: 0,
3185 is_primary: true,
3186 ..Default::default()
3187 }
3188 }
3189 ]
3190 );
3191
3192 assert_eq!(
3193 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3194 &[
3195 DiagnosticEntry {
3196 range: Point::new(1, 13)..Point::new(1, 15),
3197 diagnostic: Diagnostic {
3198 severity: DiagnosticSeverity::HINT,
3199 message: "error 2 hint 1".to_string(),
3200 group_id: 0,
3201 is_primary: false,
3202 ..Default::default()
3203 }
3204 },
3205 DiagnosticEntry {
3206 range: Point::new(1, 13)..Point::new(1, 15),
3207 diagnostic: Diagnostic {
3208 severity: DiagnosticSeverity::HINT,
3209 message: "error 2 hint 2".to_string(),
3210 group_id: 0,
3211 is_primary: false,
3212 ..Default::default()
3213 }
3214 },
3215 DiagnosticEntry {
3216 range: Point::new(2, 8)..Point::new(2, 17),
3217 diagnostic: Diagnostic {
3218 severity: DiagnosticSeverity::ERROR,
3219 message: "error 2".to_string(),
3220 group_id: 0,
3221 is_primary: true,
3222 ..Default::default()
3223 }
3224 }
3225 ]
3226 );
3227
3228 assert_eq!(
3229 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3230 &[
3231 DiagnosticEntry {
3232 range: Point::new(1, 8)..Point::new(1, 9),
3233 diagnostic: Diagnostic {
3234 severity: DiagnosticSeverity::WARNING,
3235 message: "error 1".to_string(),
3236 group_id: 1,
3237 is_primary: true,
3238 ..Default::default()
3239 }
3240 },
3241 DiagnosticEntry {
3242 range: Point::new(1, 8)..Point::new(1, 9),
3243 diagnostic: Diagnostic {
3244 severity: DiagnosticSeverity::HINT,
3245 message: "error 1 hint 1".to_string(),
3246 group_id: 1,
3247 is_primary: false,
3248 ..Default::default()
3249 }
3250 },
3251 ]
3252 );
3253}
3254
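// Exercises prepare-rename and rename: the prepared range should cover the
// symbol under the cursor, and the resulting workspace edit should be applied
// to every affected buffer in a single project transaction.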
3255#[gpui::test]
3256async fn test_rename(cx: &mut gpui::TestAppContext) {
3257 init_test(cx);
3258
3259 let mut language = Language::new(
3260 LanguageConfig {
3261 name: "Rust".into(),
3262 path_suffixes: vec!["rs".to_string()],
3263 ..Default::default()
3264 },
3265 Some(tree_sitter_rust::language()),
3266 );
3267 let mut fake_servers = language
3268 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3269 capabilities: lsp::ServerCapabilities {
3270 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3271 prepare_provider: Some(true),
3272 work_done_progress_options: Default::default(),
3273 })),
3274 ..Default::default()
3275 },
3276 ..Default::default()
3277 }))
3278 .await;
3279
3280 let fs = FakeFs::new(cx.background());
3281 fs.insert_tree(
3282 "/dir",
3283 json!({
3284 "one.rs": "const ONE: usize = 1;",
3285 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3286 }),
3287 )
3288 .await;
3289
3290 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3291 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3292 let buffer = project
3293 .update(cx, |project, cx| {
3294 project.open_local_buffer("/dir/one.rs", cx)
3295 })
3296 .await
3297 .unwrap();
3298
3299 let fake_server = fake_servers.next().await.unwrap();
3300
3301 let response = project.update(cx, |project, cx| {
3302 project.prepare_rename(buffer.clone(), 7, cx)
3303 });
3304 fake_server
3305 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3306 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3307 assert_eq!(params.position, lsp::Position::new(0, 7));
3308 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3309 lsp::Position::new(0, 6),
3310 lsp::Position::new(0, 9),
3311 ))))
3312 })
3313 .next()
3314 .await
3315 .unwrap();
3316 let range = response.await.unwrap().unwrap();
3317 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3318 assert_eq!(range, 6..9);
3319
3320 let response = project.update(cx, |project, cx| {
3321 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3322 });
3323 fake_server
3324 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3325 assert_eq!(
3326 params.text_document_position.text_document.uri.as_str(),
3327 "file:///dir/one.rs"
3328 );
3329 assert_eq!(
3330 params.text_document_position.position,
3331 lsp::Position::new(0, 7)
3332 );
3333 assert_eq!(params.new_name, "THREE");
3334 Ok(Some(lsp::WorkspaceEdit {
3335 changes: Some(
3336 [
3337 (
3338 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3339 vec![lsp::TextEdit::new(
3340 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3341 "THREE".to_string(),
3342 )],
3343 ),
3344 (
3345 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3346 vec![
3347 lsp::TextEdit::new(
3348 lsp::Range::new(
3349 lsp::Position::new(0, 24),
3350 lsp::Position::new(0, 27),
3351 ),
3352 "THREE".to_string(),
3353 ),
3354 lsp::TextEdit::new(
3355 lsp::Range::new(
3356 lsp::Position::new(0, 35),
3357 lsp::Position::new(0, 38),
3358 ),
3359 "THREE".to_string(),
3360 ),
3361 ],
3362 ),
3363 ]
3364 .into_iter()
3365 .collect(),
3366 ),
3367 ..Default::default()
3368 }))
3369 })
3370 .next()
3371 .await
3372 .unwrap();
3373 let mut transaction = response.await.unwrap().0;
3374 assert_eq!(transaction.len(), 2);
3375 assert_eq!(
3376 transaction
3377 .remove_entry(&buffer)
3378 .unwrap()
3379 .0
3380 .read_with(cx, |buffer, _| buffer.text()),
3381 "const THREE: usize = 1;"
3382 );
3383 assert_eq!(
3384 transaction
3385 .into_keys()
3386 .next()
3387 .unwrap()
3388 .read_with(cx, |buffer, _| buffer.text()),
3389 "const TWO: usize = one::THREE + one::THREE;"
3390 );
3391}
3392
3393#[gpui::test]
3394async fn test_search(cx: &mut gpui::TestAppContext) {
3395 init_test(cx);
3396
3397 let fs = FakeFs::new(cx.background());
3398 fs.insert_tree(
3399 "/dir",
3400 json!({
3401 "one.rs": "const ONE: usize = 1;",
3402 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3403 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3404 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3405 }),
3406 )
3407 .await;
3408 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3409 assert_eq!(
3410 search(
3411 &project,
3412 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3413 cx
3414 )
3415 .await
3416 .unwrap(),
3417 HashMap::from_iter([
3418 ("two.rs".to_string(), vec![6..9]),
3419 ("three.rs".to_string(), vec![37..40])
3420 ])
3421 );
3422
3423 let buffer_4 = project
3424 .update(cx, |project, cx| {
3425 project.open_local_buffer("/dir/four.rs", cx)
3426 })
3427 .await
3428 .unwrap();
3429 buffer_4.update(cx, |buffer, cx| {
3430 let text = "two::TWO";
3431 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3432 });
3433
3434 assert_eq!(
3435 search(
3436 &project,
3437 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3438 cx
3439 )
3440 .await
3441 .unwrap(),
3442 HashMap::from_iter([
3443 ("two.rs".to_string(), vec![6..9]),
3444 ("three.rs".to_string(), vec![37..40]),
3445 ("four.rs".to_string(), vec![25..28, 36..39])
3446 ])
3447 );
3448}
3449
3450#[gpui::test]
3451async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3452 init_test(cx);
3453
3454 let search_query = "file";
3455
3456 let fs = FakeFs::new(cx.background());
3457 fs.insert_tree(
3458 "/dir",
3459 json!({
3460 "one.rs": r#"// Rust file one"#,
3461 "one.ts": r#"// TypeScript file one"#,
3462 "two.rs": r#"// Rust file two"#,
3463 "two.ts": r#"// TypeScript file two"#,
3464 }),
3465 )
3466 .await;
3467 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3468
3469 assert!(
3470 search(
3471 &project,
3472 SearchQuery::text(
3473 search_query,
3474 false,
3475 true,
3476 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3477 Vec::new()
3478 ),
3479 cx
3480 )
3481 .await
3482 .unwrap()
3483 .is_empty(),
3484 "If no inclusions match, no files should be returned"
3485 );
3486
3487 assert_eq!(
3488 search(
3489 &project,
3490 SearchQuery::text(
3491 search_query,
3492 false,
3493 true,
3494 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3495 Vec::new()
3496 ),
3497 cx
3498 )
3499 .await
3500 .unwrap(),
3501 HashMap::from_iter([
3502 ("one.rs".to_string(), vec![8..12]),
3503 ("two.rs".to_string(), vec![8..12]),
3504 ]),
3505 "Rust only search should give only Rust files"
3506 );
3507
3508 assert_eq!(
3509 search(
3510 &project,
3511 SearchQuery::text(
3512 search_query,
3513 false,
3514 true,
3515 vec![
3516 Glob::new("*.ts").unwrap().compile_matcher(),
3517 Glob::new("*.odd").unwrap().compile_matcher(),
3518 ],
3519 Vec::new()
3520 ),
3521 cx
3522 )
3523 .await
3524 .unwrap(),
3525 HashMap::from_iter([
3526 ("one.ts".to_string(), vec![14..18]),
3527 ("two.ts".to_string(), vec![14..18]),
3528 ]),
3529 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3530 );
3531
3532 assert_eq!(
3533 search(
3534 &project,
3535 SearchQuery::text(
3536 search_query,
3537 false,
3538 true,
3539 vec![
3540 Glob::new("*.rs").unwrap().compile_matcher(),
3541 Glob::new("*.ts").unwrap().compile_matcher(),
3542 Glob::new("*.odd").unwrap().compile_matcher(),
3543 ],
3544 Vec::new()
3545 ),
3546 cx
3547 )
3548 .await
3549 .unwrap(),
3550 HashMap::from_iter([
3551 ("one.rs".to_string(), vec![8..12]),
3552 ("one.ts".to_string(), vec![14..18]),
3553 ("two.rs".to_string(), vec![8..12]),
3554 ("two.ts".to_string(), vec![14..18]),
3555 ]),
3556 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3557 );
3558}
3559
3560#[gpui::test]
3561async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3562 init_test(cx);
3563
3564 let search_query = "file";
3565
3566 let fs = FakeFs::new(cx.background());
3567 fs.insert_tree(
3568 "/dir",
3569 json!({
3570 "one.rs": r#"// Rust file one"#,
3571 "one.ts": r#"// TypeScript file one"#,
3572 "two.rs": r#"// Rust file two"#,
3573 "two.ts": r#"// TypeScript file two"#,
3574 }),
3575 )
3576 .await;
3577 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3578
3579 assert_eq!(
3580 search(
3581 &project,
3582 SearchQuery::text(
3583 search_query,
3584 false,
3585 true,
3586 Vec::new(),
3587 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3588 ),
3589 cx
3590 )
3591 .await
3592 .unwrap(),
3593 HashMap::from_iter([
3594 ("one.rs".to_string(), vec![8..12]),
3595 ("one.ts".to_string(), vec![14..18]),
3596 ("two.rs".to_string(), vec![8..12]),
3597 ("two.ts".to_string(), vec![14..18]),
3598 ]),
3599 "If no exclusions match, all files should be returned"
3600 );
3601
3602 assert_eq!(
3603 search(
3604 &project,
3605 SearchQuery::text(
3606 search_query,
3607 false,
3608 true,
3609 Vec::new(),
3610 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3611 ),
3612 cx
3613 )
3614 .await
3615 .unwrap(),
3616 HashMap::from_iter([
3617 ("one.ts".to_string(), vec![14..18]),
3618 ("two.ts".to_string(), vec![14..18]),
3619 ]),
3620 "Rust exclusion search should give only TypeScript files"
3621 );
3622
3623 assert_eq!(
3624 search(
3625 &project,
3626 SearchQuery::text(
3627 search_query,
3628 false,
3629 true,
3630 Vec::new(),
3631 vec![
3632 Glob::new("*.ts").unwrap().compile_matcher(),
3633 Glob::new("*.odd").unwrap().compile_matcher(),
3634 ],
3635 ),
3636 cx
3637 )
3638 .await
3639 .unwrap(),
3640 HashMap::from_iter([
3641 ("one.rs".to_string(), vec![8..12]),
3642 ("two.rs".to_string(), vec![8..12]),
3643 ]),
3644 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3645 );
3646
3647 assert!(
3648 search(
3649 &project,
3650 SearchQuery::text(
3651 search_query,
3652 false,
3653 true,
3654 Vec::new(),
3655 vec![
3656 Glob::new("*.rs").unwrap().compile_matcher(),
3657 Glob::new("*.ts").unwrap().compile_matcher(),
3658 Glob::new("*.odd").unwrap().compile_matcher(),
3659 ],
3660 ),
3661 cx
3662 )
3663 .await
3664 .unwrap().is_empty(),
3665 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
3666 );
3667}
3668
3669#[gpui::test]
3670async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3671 init_test(cx);
3672
3673 let search_query = "file";
3674
3675 let fs = FakeFs::new(cx.background());
3676 fs.insert_tree(
3677 "/dir",
3678 json!({
3679 "one.rs": r#"// Rust file one"#,
3680 "one.ts": r#"// TypeScript file one"#,
3681 "two.rs": r#"// Rust file two"#,
3682 "two.ts": r#"// TypeScript file two"#,
3683 }),
3684 )
3685 .await;
3686 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3687
3688 assert!(
3689 search(
3690 &project,
3691 SearchQuery::text(
3692 search_query,
3693 false,
3694 true,
3695 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3696 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3697 ),
3698 cx
3699 )
3700 .await
3701 .unwrap()
3702 .is_empty(),
3703 "If both no exclusions and inclusions match, exclusions should win and return nothing"
3704 );
3705
3706 assert!(
3707 search(
3708 &project,
3709 SearchQuery::text(
3710 search_query,
3711 false,
3712 true,
3713 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3714 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3715 ),
3716 cx
3717 )
3718 .await
3719 .unwrap()
3720 .is_empty(),
3721 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
3722 );
3723
3724 assert!(
3725 search(
3726 &project,
3727 SearchQuery::text(
3728 search_query,
3729 false,
3730 true,
3731 vec![
3732 Glob::new("*.ts").unwrap().compile_matcher(),
3733 Glob::new("*.odd").unwrap().compile_matcher()
3734 ],
3735 vec![
3736 Glob::new("*.ts").unwrap().compile_matcher(),
3737 Glob::new("*.odd").unwrap().compile_matcher()
3738 ],
3739 ),
3740 cx
3741 )
3742 .await
3743 .unwrap()
3744 .is_empty(),
3745 "Non-matching inclusions and exclusions should not change that."
3746 );
3747
3748 assert_eq!(
3749 search(
3750 &project,
3751 SearchQuery::text(
3752 search_query,
3753 false,
3754 true,
3755 vec![
3756 Glob::new("*.ts").unwrap().compile_matcher(),
3757 Glob::new("*.odd").unwrap().compile_matcher()
3758 ],
3759 vec![
3760 Glob::new("*.rs").unwrap().compile_matcher(),
3761 Glob::new("*.odd").unwrap().compile_matcher()
3762 ],
3763 ),
3764 cx
3765 )
3766 .await
3767 .unwrap(),
3768 HashMap::from_iter([
3769 ("one.ts".to_string(), vec![14..18]),
3770 ("two.ts".to_string(), vec![14..18]),
3771 ]),
3772 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3773 );
3774}
3775
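/// Runs `query` against the project and flattens the results into a map from
/// each matching buffer's path to the byte ranges of its matches, which is
/// easier to assert against than buffer handles and anchor ranges.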
3776async fn search(
3777 project: &ModelHandle<Project>,
3778 query: SearchQuery,
3779 cx: &mut gpui::TestAppContext,
3780) -> Result<HashMap<String, Vec<Range<usize>>>> {
3781 let results = project
3782 .update(cx, |project, cx| project.search(query, cx))
3783 .await?;
3784
3785 Ok(results
3786 .into_iter()
3787 .map(|(buffer, ranges)| {
3788 buffer.read_with(cx, |buffer, _| {
3789 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3790 let ranges = ranges
3791 .into_iter()
3792 .map(|range| range.to_offset(buffer))
3793 .collect::<Vec<_>>();
3794 (path, ranges)
3795 })
3796 })
3797 .collect())
3798}
3799
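/// Common test setup: forbid parking on the foreground executor and install
/// the settings store, language support, and project settings globals that
/// these tests rely on.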
3800fn init_test(cx: &mut gpui::TestAppContext) {
3801 cx.foreground().forbid_parking();
3802
3803 cx.update(|cx| {
3804 cx.set_global(SettingsStore::test(cx));
3805 language::init(cx);
3806 Project::init_settings(cx);
3807 });
3808}