use crate::{worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

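// Install a logger for tests when `RUST_LOG` is set. The `ctor` attribute runs
// this once, when the test binary is loaded.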
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

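// Open a project whose root is a symlink and which contains a symlinked
// directory, and verify that both symlinks are followed.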
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.foreground().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}

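// Exercise the lifecycle of language servers in a project: servers are started
// lazily per language, notified about buffer opens, edits, saves, and renames,
// and restarted on demand.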
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // The diagnostics are cleared, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

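// Verify that file system events are forwarded to a language server according
// to the glob patterns it registered via `workspace/didChangeWatchedFiles`.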
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "a.rs": "",
            "b.rs": "",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.foreground().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/a.rs", cx)
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![lsp::FileSystemWatcher {
                            glob_pattern: "*.{rs,c}".to_string(),
                            kind: None,
                        }],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.foreground().run_until_parked();
    assert_eq!(file_changes.lock().len(), 0);

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.foreground().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}

#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(0); 0]
        );
    });
}

#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.foreground().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}

#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable the Rust language server, ensuring that only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}

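/// Collects the chunks of `buffer` within `range`, merging adjacent chunks
/// that carry the same diagnostic severity.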
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}

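// Go-to-definition should open the target file in a new, invisible worktree,
// and that worktree should be released once the definition is dropped.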
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}

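// When the server omits edit ranges from completion items, the replacement
// range falls back to the word preceding the cursor (here "fqn", and "cmp"
// inside the string literal).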
2005#[gpui::test]
2006async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2007 init_test(cx);
2008
2009 let mut language = Language::new(
2010 LanguageConfig {
2011 name: "TypeScript".into(),
2012 path_suffixes: vec!["ts".to_string()],
2013 ..Default::default()
2014 },
2015 Some(tree_sitter_typescript::language_typescript()),
2016 );
2017 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2018
2019 let fs = FakeFs::new(cx.background());
2020 fs.insert_tree(
2021 "/dir",
2022 json!({
2023 "a.ts": "",
2024 }),
2025 )
2026 .await;
2027
2028 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2029 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2030 let buffer = project
2031 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2032 .await
2033 .unwrap();
2034
2035 let fake_server = fake_language_servers.next().await.unwrap();
2036
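    // The completion item carries no edit range, so the word before the cursor ("fqn")
    // should be replaced by the item's insert text.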
2037 let text = "let a = b.fqn";
2038 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2039 let completions = project.update(cx, |project, cx| {
2040 project.completions(&buffer, text.len(), cx)
2041 });
2042
2043 fake_server
2044 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2045 Ok(Some(lsp::CompletionResponse::Array(vec![
2046 lsp::CompletionItem {
2047 label: "fullyQualifiedName?".into(),
2048 insert_text: Some("fullyQualifiedName".into()),
2049 ..Default::default()
2050 },
2051 ])))
2052 })
2053 .next()
2054 .await;
2055 let completions = completions.await.unwrap();
2056 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2057 assert_eq!(completions.len(), 1);
2058 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2059 assert_eq!(
2060 completions[0].old_range.to_offset(&snapshot),
2061 text.len() - 3..text.len()
2062 );
2063
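    // Inside a string literal, the replaced range is still the word before the cursor ("cmp").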
2064 let text = "let a = \"atoms/cmp\"";
2065 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2066 let completions = project.update(cx, |project, cx| {
2067 project.completions(&buffer, text.len() - 1, cx)
2068 });
2069
2070 fake_server
2071 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2072 Ok(Some(lsp::CompletionResponse::Array(vec![
2073 lsp::CompletionItem {
2074 label: "component".into(),
2075 ..Default::default()
2076 },
2077 ])))
2078 })
2079 .next()
2080 .await;
2081 let completions = completions.await.unwrap();
2082 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2083 assert_eq!(completions.len(), 1);
2084 assert_eq!(completions[0].new_text, "component");
2085 assert_eq!(
2086 completions[0].old_range.to_offset(&snapshot),
2087 text.len() - 4..text.len() - 1
2088 );
2089}
2090
2091#[gpui::test]
2092async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2093 init_test(cx);
2094
2095 let mut language = Language::new(
2096 LanguageConfig {
2097 name: "TypeScript".into(),
2098 path_suffixes: vec!["ts".to_string()],
2099 ..Default::default()
2100 },
2101 Some(tree_sitter_typescript::language_typescript()),
2102 );
2103 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2104
2105 let fs = FakeFs::new(cx.background());
2106 fs.insert_tree(
2107 "/dir",
2108 json!({
2109 "a.ts": "",
2110 }),
2111 )
2112 .await;
2113
2114 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2115 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2116 let buffer = project
2117 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2118 .await
2119 .unwrap();
2120
2121 let fake_server = fake_language_servers.next().await.unwrap();
2122
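    // The completion's insert text contains CR and CRLF line endings; they should be
    // normalized to LF in the resulting new text.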
2123 let text = "let a = b.fqn";
2124 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2125 let completions = project.update(cx, |project, cx| {
2126 project.completions(&buffer, text.len(), cx)
2127 });
2128
2129 fake_server
2130 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2131 Ok(Some(lsp::CompletionResponse::Array(vec![
2132 lsp::CompletionItem {
2133 label: "fullyQualifiedName?".into(),
2134 insert_text: Some("fully\rQualified\r\nName".into()),
2135 ..Default::default()
2136 },
2137 ])))
2138 })
2139 .next()
2140 .await;
2141 let completions = completions.await.unwrap();
2142 assert_eq!(completions.len(), 1);
2143 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2144}
2145
2146#[gpui::test(iterations = 10)]
2147async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2148 init_test(cx);
2149
2150 let mut language = Language::new(
2151 LanguageConfig {
2152 name: "TypeScript".into(),
2153 path_suffixes: vec!["ts".to_string()],
2154 ..Default::default()
2155 },
2156 None,
2157 );
2158 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2159
2160 let fs = FakeFs::new(cx.background());
2161 fs.insert_tree(
2162 "/dir",
2163 json!({
2164 "a.ts": "a",
2165 }),
2166 )
2167 .await;
2168
2169 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2170 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2171 let buffer = project
2172 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2173 .await
2174 .unwrap();
2175
2176 let fake_server = fake_language_servers.next().await.unwrap();
2177
2178 // Language server returns code actions that contain commands but no edits.
2179 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2180 fake_server
2181 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2182 Ok(Some(vec![
2183 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2184 title: "The code action".into(),
2185 command: Some(lsp::Command {
2186 title: "The command".into(),
2187 command: "_the/command".into(),
2188 arguments: Some(vec![json!("the-argument")]),
2189 }),
2190 ..Default::default()
2191 }),
2192 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2193 title: "two".into(),
2194 ..Default::default()
2195 }),
2196 ]))
2197 })
2198 .next()
2199 .await;
2200
2201 let action = actions.await.unwrap()[0].clone();
2202 let apply = project.update(cx, |project, cx| {
2203 project.apply_code_action(buffer.clone(), action, true, cx)
2204 });
2205
2206 // Resolving the code action does not populate its edits. In the absence of
2207 // edits, we must execute the given command.
2208 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2209 |action, _| async move { Ok(action) },
2210 );
2211
2212 // While executing the command, the language server sends the editor
2213 // a `workspace/applyEdit` request.
2214 fake_server
2215 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2216 let fake = fake_server.clone();
2217 move |params, _| {
2218 assert_eq!(params.command, "_the/command");
2219 let fake = fake.clone();
2220 async move {
2221 fake.server
2222 .request::<lsp::request::ApplyWorkspaceEdit>(
2223 lsp::ApplyWorkspaceEditParams {
2224 label: None,
2225 edit: lsp::WorkspaceEdit {
2226 changes: Some(
2227 [(
2228 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2229 vec![lsp::TextEdit {
2230 range: lsp::Range::new(
2231 lsp::Position::new(0, 0),
2232 lsp::Position::new(0, 0),
2233 ),
2234 new_text: "X".into(),
2235 }],
2236 )]
2237 .into_iter()
2238 .collect(),
2239 ),
2240 ..Default::default()
2241 },
2242 },
2243 )
2244 .await
2245 .unwrap();
2246 Ok(Some(json!(null)))
2247 }
2248 }
2249 })
2250 .next()
2251 .await;
2252
2253 // Applying the code action returns a project transaction containing the edits
2254 // sent by the language server in its `workspace/applyEdit` request.
2255 let transaction = apply.await.unwrap();
2256 assert!(transaction.0.contains_key(&buffer));
2257 buffer.update(cx, |buffer, cx| {
2258 assert_eq!(buffer.text(), "Xa");
2259 buffer.undo(cx);
2260 assert_eq!(buffer.text(), "a");
2261 });
2262}
2263
2264#[gpui::test(iterations = 10)]
2265async fn test_save_file(cx: &mut gpui::TestAppContext) {
2266 init_test(cx);
2267
2268 let fs = FakeFs::new(cx.background());
2269 fs.insert_tree(
2270 "/dir",
2271 json!({
2272 "file1": "the old contents",
2273 }),
2274 )
2275 .await;
2276
2277 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2278 let buffer = project
2279 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2280 .await
2281 .unwrap();
2282 buffer.update(cx, |buffer, cx| {
2283 assert_eq!(buffer.text(), "the old contents");
2284 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2285 });
2286
2287 project
2288 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2289 .await
2290 .unwrap();
2291
2292 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2293 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2294}
2295
2296#[gpui::test]
2297async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2298 init_test(cx);
2299
2300 let fs = FakeFs::new(cx.background());
2301 fs.insert_tree(
2302 "/dir",
2303 json!({
2304 "file1": "the old contents",
2305 }),
2306 )
2307 .await;
2308
2309 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2310 let buffer = project
2311 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2312 .await
2313 .unwrap();
2314 buffer.update(cx, |buffer, cx| {
2315 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2316 });
2317
2318 project
2319 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2320 .await
2321 .unwrap();
2322
2323 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2324 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2325}
2326
2327#[gpui::test]
2328async fn test_save_as(cx: &mut gpui::TestAppContext) {
2329 init_test(cx);
2330
2331 let fs = FakeFs::new(cx.background());
2332 fs.insert_tree("/dir", json!({})).await;
2333
2334 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2335
2336 let languages = project.read_with(cx, |project, _| project.languages().clone());
2337 languages.register(
2338 "/some/path",
2339 LanguageConfig {
2340 name: "Rust".into(),
2341 path_suffixes: vec!["rs".into()],
2342 ..Default::default()
2343 },
2344 tree_sitter_rust::language(),
2345 vec![],
2346 |_| Default::default(),
2347 );
2348
2349 let buffer = project.update(cx, |project, cx| {
2350 project.create_buffer("", None, cx).unwrap()
2351 });
2352 buffer.update(cx, |buffer, cx| {
2353 buffer.edit([(0..0, "abc")], None, cx);
2354 assert!(buffer.is_dirty());
2355 assert!(!buffer.has_conflict());
2356 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2357 });
2358 project
2359 .update(cx, |project, cx| {
2360 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2361 })
2362 .await
2363 .unwrap();
2364 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2365
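    // After "save as", the buffer takes on its new path, becomes clean, and picks up
    // the registered Rust language.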
2366 cx.foreground().run_until_parked();
2367 buffer.read_with(cx, |buffer, cx| {
2368 assert_eq!(
2369 buffer.file().unwrap().full_path(cx),
2370 Path::new("dir/file1.rs")
2371 );
2372 assert!(!buffer.is_dirty());
2373 assert!(!buffer.has_conflict());
2374 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2375 });
2376
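    // Opening the newly saved path returns the same buffer instead of creating a new one.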
2377 let opened_buffer = project
2378 .update(cx, |project, cx| {
2379 project.open_local_buffer("/dir/file1.rs", cx)
2380 })
2381 .await
2382 .unwrap();
2383 assert_eq!(opened_buffer, buffer);
2384}
2385
2386#[gpui::test(retries = 5)]
2387async fn test_rescan_and_remote_updates(
2388 deterministic: Arc<Deterministic>,
2389 cx: &mut gpui::TestAppContext,
2390) {
2391 init_test(cx);
2392 cx.foreground().allow_parking();
2393
2394 let dir = temp_tree(json!({
2395 "a": {
2396 "file1": "",
2397 "file2": "",
2398 "file3": "",
2399 },
2400 "b": {
2401 "c": {
2402 "file4": "",
2403 "file5": "",
2404 }
2405 }
2406 }));
2407
2408 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2409 let rpc = project.read_with(cx, |p, _| p.client.clone());
2410
2411 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2412 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2413 async move { buffer.await.unwrap() }
2414 };
2415 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2416 project.read_with(cx, |project, cx| {
2417 let tree = project.worktrees(cx).next().unwrap();
2418 tree.read(cx)
2419 .entry_for_path(path)
2420 .unwrap_or_else(|| panic!("no entry for path {}", path))
2421 .id
2422 })
2423 };
2424
2425 let buffer2 = buffer_for_path("a/file2", cx).await;
2426 let buffer3 = buffer_for_path("a/file3", cx).await;
2427 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2428 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2429
2430 let file2_id = id_for_path("a/file2", cx);
2431 let file3_id = id_for_path("a/file3", cx);
2432 let file4_id = id_for_path("b/c/file4", cx);
2433
2434 // Create a remote copy of this worktree.
2435 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2436 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2437 let remote = cx.update(|cx| {
2438 Worktree::remote(
2439 1,
2440 1,
2441 proto::WorktreeMetadata {
2442 id: initial_snapshot.id().to_proto(),
2443 root_name: initial_snapshot.root_name().into(),
2444 abs_path: initial_snapshot
2445 .abs_path()
2446 .as_os_str()
2447 .to_string_lossy()
2448 .into(),
2449 visible: true,
2450 },
2451 rpc.clone(),
2452 cx,
2453 )
2454 });
2455 remote.update(cx, |remote, _| {
2456 let update = initial_snapshot.build_initial_update(1);
2457 remote.as_remote_mut().unwrap().update_from_remote(update);
2458 });
2459 deterministic.run_until_parked();
2460
2461 cx.read(|cx| {
2462 assert!(!buffer2.read(cx).is_dirty());
2463 assert!(!buffer3.read(cx).is_dirty());
2464 assert!(!buffer4.read(cx).is_dirty());
2465 assert!(!buffer5.read(cx).is_dirty());
2466 });
2467
2468 // Rename and delete files and directories.
2469 tree.flush_fs_events(cx).await;
2470 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2471 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2472 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2473 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2474 tree.flush_fs_events(cx).await;
2475
2476 let expected_paths = vec![
2477 "a",
2478 "a/file1",
2479 "a/file2.new",
2480 "b",
2481 "d",
2482 "d/file3",
2483 "d/file4",
2484 ];
2485
2486 cx.read(|app| {
2487 assert_eq!(
2488 tree.read(app)
2489 .paths()
2490 .map(|p| p.to_str().unwrap())
2491 .collect::<Vec<_>>(),
2492 expected_paths
2493 );
2494
2495 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2496 assert_eq!(id_for_path("d/file3", cx), file3_id);
2497 assert_eq!(id_for_path("d/file4", cx), file4_id);
2498
2499 assert_eq!(
2500 buffer2.read(app).file().unwrap().path().as_ref(),
2501 Path::new("a/file2.new")
2502 );
2503 assert_eq!(
2504 buffer3.read(app).file().unwrap().path().as_ref(),
2505 Path::new("d/file3")
2506 );
2507 assert_eq!(
2508 buffer4.read(app).file().unwrap().path().as_ref(),
2509 Path::new("d/file4")
2510 );
2511 assert_eq!(
2512 buffer5.read(app).file().unwrap().path().as_ref(),
2513 Path::new("b/c/file5")
2514 );
2515
2516 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2517 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2518 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2519 assert!(buffer5.read(app).file().unwrap().is_deleted());
2520 });
2521
2522 // Update the remote worktree. Check that it becomes consistent with the
2523 // local worktree.
2524 remote.update(cx, |remote, cx| {
2525 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2526 &initial_snapshot,
2527 1,
2528 1,
2529 true,
2530 );
2531 remote.as_remote_mut().unwrap().update_from_remote(update);
2532 });
2533 deterministic.run_until_parked();
2534 remote.read_with(cx, |remote, _| {
2535 assert_eq!(
2536 remote
2537 .paths()
2538 .map(|p| p.to_str().unwrap())
2539 .collect::<Vec<_>>(),
2540 expected_paths
2541 );
2542 });
2543}
2544
2545#[gpui::test(iterations = 10)]
2546async fn test_buffer_identity_across_renames(
2547 deterministic: Arc<Deterministic>,
2548 cx: &mut gpui::TestAppContext,
2549) {
2550 init_test(cx);
2551
2552 let fs = FakeFs::new(cx.background());
2553 fs.insert_tree(
2554 "/dir",
2555 json!({
2556 "a": {
2557 "file1": "",
2558 }
2559 }),
2560 )
2561 .await;
2562
2563 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2564 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2565 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2566
2567 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2568 project.read_with(cx, |project, cx| {
2569 let tree = project.worktrees(cx).next().unwrap();
2570 tree.read(cx)
2571 .entry_for_path(path)
2572 .unwrap_or_else(|| panic!("no entry for path {}", path))
2573 .id
2574 })
2575 };
2576
2577 let dir_id = id_for_path("a", cx);
2578 let file_id = id_for_path("a/file1", cx);
2579 let buffer = project
2580 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2581 .await
2582 .unwrap();
2583 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2584
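    // Rename the parent directory; entry ids and the already-open buffer should be preserved.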
2585 project
2586 .update(cx, |project, cx| {
2587 project.rename_entry(dir_id, Path::new("b"), cx)
2588 })
2589 .unwrap()
2590 .await
2591 .unwrap();
2592 deterministic.run_until_parked();
2593 assert_eq!(id_for_path("b", cx), dir_id);
2594 assert_eq!(id_for_path("b/file1", cx), file_id);
2595 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2596}
2597
2598#[gpui::test]
2599async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2600 init_test(cx);
2601
2602 let fs = FakeFs::new(cx.background());
2603 fs.insert_tree(
2604 "/dir",
2605 json!({
2606 "a.txt": "a-contents",
2607 "b.txt": "b-contents",
2608 }),
2609 )
2610 .await;
2611
2612 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2613
2614 // Spawn multiple tasks to open paths, repeating some paths.
2615 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2616 (
2617 p.open_local_buffer("/dir/a.txt", cx),
2618 p.open_local_buffer("/dir/b.txt", cx),
2619 p.open_local_buffer("/dir/a.txt", cx),
2620 )
2621 });
2622
2623 let buffer_a_1 = buffer_a_1.await.unwrap();
2624 let buffer_a_2 = buffer_a_2.await.unwrap();
2625 let buffer_b = buffer_b.await.unwrap();
2626 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2627 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2628
2629 // There is only one buffer per path.
2630 let buffer_a_id = buffer_a_1.id();
2631 assert_eq!(buffer_a_2.id(), buffer_a_id);
2632
2633 // Open the same path again while it is still open.
2634 drop(buffer_a_1);
2635 let buffer_a_3 = project
2636 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2637 .await
2638 .unwrap();
2639
2640 // There's still only one buffer per path.
2641 assert_eq!(buffer_a_3.id(), buffer_a_id);
2642}
2643
2644#[gpui::test]
2645async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2646 init_test(cx);
2647
2648 let fs = FakeFs::new(cx.background());
2649 fs.insert_tree(
2650 "/dir",
2651 json!({
2652 "file1": "abc",
2653 "file2": "def",
2654 "file3": "ghi",
2655 }),
2656 )
2657 .await;
2658
2659 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2660
2661 let buffer1 = project
2662 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2663 .await
2664 .unwrap();
2665 let events = Rc::new(RefCell::new(Vec::new()));
2666
2667 // initially, the buffer isn't dirty.
2668 buffer1.update(cx, |buffer, cx| {
2669 cx.subscribe(&buffer1, {
2670 let events = events.clone();
2671 move |_, _, event, _| match event {
2672 BufferEvent::Operation(_) => {}
2673 _ => events.borrow_mut().push(event.clone()),
2674 }
2675 })
2676 .detach();
2677
2678 assert!(!buffer.is_dirty());
2679 assert!(events.borrow().is_empty());
2680
2681 buffer.edit([(1..2, "")], None, cx);
2682 });
2683
2684 // after the first edit, the buffer is dirty, and emits a `DirtyChanged` event.
2685 buffer1.update(cx, |buffer, cx| {
2686 assert!(buffer.text() == "ac");
2687 assert!(buffer.is_dirty());
2688 assert_eq!(
2689 *events.borrow(),
2690 &[language::Event::Edited, language::Event::DirtyChanged]
2691 );
2692 events.borrow_mut().clear();
2693 buffer.did_save(
2694 buffer.version(),
2695 buffer.as_rope().fingerprint(),
2696 buffer.file().unwrap().mtime(),
2697 cx,
2698 );
2699 });
2700
2701 // after saving, the buffer is not dirty, and emits a saved event.
2702 buffer1.update(cx, |buffer, cx| {
2703 assert!(!buffer.is_dirty());
2704 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2705 events.borrow_mut().clear();
2706
2707 buffer.edit([(1..1, "B")], None, cx);
2708 buffer.edit([(2..2, "D")], None, cx);
2709 });
2710
2711 // after editing again, the buffer is dirty, and emits another `DirtyChanged` event.
2712 buffer1.update(cx, |buffer, cx| {
2713 assert!(buffer.text() == "aBDc");
2714 assert!(buffer.is_dirty());
2715 assert_eq!(
2716 *events.borrow(),
2717 &[
2718 language::Event::Edited,
2719 language::Event::DirtyChanged,
2720 language::Event::Edited,
2721 ],
2722 );
2723 events.borrow_mut().clear();
2724
2725 // After restoring the buffer to its previously-saved state,
2726 // the buffer is not considered dirty anymore.
2727 buffer.edit([(1..3, "")], None, cx);
2728 assert!(buffer.text() == "ac");
2729 assert!(!buffer.is_dirty());
2730 });
2731
2732 assert_eq!(
2733 *events.borrow(),
2734 &[language::Event::Edited, language::Event::DirtyChanged]
2735 );
2736
2737 // When a file is deleted, the buffer is considered dirty.
2738 let events = Rc::new(RefCell::new(Vec::new()));
2739 let buffer2 = project
2740 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2741 .await
2742 .unwrap();
2743 buffer2.update(cx, |_, cx| {
2744 cx.subscribe(&buffer2, {
2745 let events = events.clone();
2746 move |_, _, event, _| events.borrow_mut().push(event.clone())
2747 })
2748 .detach();
2749 });
2750
2751 fs.remove_file("/dir/file2".as_ref(), Default::default())
2752 .await
2753 .unwrap();
2754 cx.foreground().run_until_parked();
2755 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2756 assert_eq!(
2757 *events.borrow(),
2758 &[
2759 language::Event::DirtyChanged,
2760 language::Event::FileHandleChanged
2761 ]
2762 );
2763
2764 // When a file is already dirty when it's deleted, we don't emit a `DirtyChanged` event.
2765 let events = Rc::new(RefCell::new(Vec::new()));
2766 let buffer3 = project
2767 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2768 .await
2769 .unwrap();
2770 buffer3.update(cx, |_, cx| {
2771 cx.subscribe(&buffer3, {
2772 let events = events.clone();
2773 move |_, _, event, _| events.borrow_mut().push(event.clone())
2774 })
2775 .detach();
2776 });
2777
2778 buffer3.update(cx, |buffer, cx| {
2779 buffer.edit([(0..0, "x")], None, cx);
2780 });
2781 events.borrow_mut().clear();
2782 fs.remove_file("/dir/file3".as_ref(), Default::default())
2783 .await
2784 .unwrap();
2785 cx.foreground().run_until_parked();
2786 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2787 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2788}
2789
2790#[gpui::test]
2791async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2792 init_test(cx);
2793
2794 let initial_contents = "aaa\nbbbbb\nc\n";
2795 let fs = FakeFs::new(cx.background());
2796 fs.insert_tree(
2797 "/dir",
2798 json!({
2799 "the-file": initial_contents,
2800 }),
2801 )
2802 .await;
2803 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2804 let buffer = project
2805 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2806 .await
2807 .unwrap();
2808
2809 let anchors = (0..3)
2810 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2811 .collect::<Vec<_>>();
2812
2813 // Change the file on disk, adding two new lines of text, and removing
2814 // one line.
2815 buffer.read_with(cx, |buffer, _| {
2816 assert!(!buffer.is_dirty());
2817 assert!(!buffer.has_conflict());
2818 });
2819 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2820 fs.save(
2821 "/dir/the-file".as_ref(),
2822 &new_contents.into(),
2823 LineEnding::Unix,
2824 )
2825 .await
2826 .unwrap();
2827
2828 // Because the buffer was not modified, it is reloaded from disk. Its
2829 // contents are edited according to the diff between the old and new
2830 // file contents.
2831 cx.foreground().run_until_parked();
2832 buffer.update(cx, |buffer, _| {
2833 assert_eq!(buffer.text(), new_contents);
2834 assert!(!buffer.is_dirty());
2835 assert!(!buffer.has_conflict());
2836
2837 let anchor_positions = anchors
2838 .iter()
2839 .map(|anchor| anchor.to_point(&*buffer))
2840 .collect::<Vec<_>>();
2841 assert_eq!(
2842 anchor_positions,
2843 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2844 );
2845 });
2846
2847 // Modify the buffer
2848 buffer.update(cx, |buffer, cx| {
2849 buffer.edit([(0..0, " ")], None, cx);
2850 assert!(buffer.is_dirty());
2851 assert!(!buffer.has_conflict());
2852 });
2853
2854 // Change the file on disk again, adding blank lines to the beginning.
2855 fs.save(
2856 "/dir/the-file".as_ref(),
2857 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2858 LineEnding::Unix,
2859 )
2860 .await
2861 .unwrap();
2862
2863 // Because the buffer is modified, it doesn't reload from disk, but is
2864 // marked as having a conflict.
2865 cx.foreground().run_until_parked();
2866 buffer.read_with(cx, |buffer, _| {
2867 assert!(buffer.has_conflict());
2868 });
2869}
2870
2871#[gpui::test]
2872async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2873 init_test(cx);
2874
2875 let fs = FakeFs::new(cx.background());
2876 fs.insert_tree(
2877 "/dir",
2878 json!({
2879 "file1": "a\nb\nc\n",
2880 "file2": "one\r\ntwo\r\nthree\r\n",
2881 }),
2882 )
2883 .await;
2884
2885 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2886 let buffer1 = project
2887 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2888 .await
2889 .unwrap();
2890 let buffer2 = project
2891 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2892 .await
2893 .unwrap();
2894
2895 buffer1.read_with(cx, |buffer, _| {
2896 assert_eq!(buffer.text(), "a\nb\nc\n");
2897 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2898 });
2899 buffer2.read_with(cx, |buffer, _| {
2900 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2901 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2902 });
2903
2904 // Change a file's line endings on disk from Unix to Windows. The buffer's
2905 // state updates correctly.
2906 fs.save(
2907 "/dir/file1".as_ref(),
2908 &"aaa\nb\nc\n".into(),
2909 LineEnding::Windows,
2910 )
2911 .await
2912 .unwrap();
2913 cx.foreground().run_until_parked();
2914 buffer1.read_with(cx, |buffer, _| {
2915 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2916 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2917 });
2918
2919 // Save a file with Windows line endings. The file is written correctly.
2920 buffer2.update(cx, |buffer, cx| {
2921 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2922 });
2923 project
2924 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
2925 .await
2926 .unwrap();
2927 assert_eq!(
2928 fs.load("/dir/file2".as_ref()).await.unwrap(),
2929 "one\r\ntwo\r\nthree\r\nfour\r\n",
2930 );
2931}
2932
2933#[gpui::test]
2934async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2935 init_test(cx);
2936
2937 let fs = FakeFs::new(cx.background());
2938 fs.insert_tree(
2939 "/the-dir",
2940 json!({
2941 "a.rs": "
2942 fn foo(mut v: Vec<usize>) {
2943 for x in &v {
2944 v.push(1);
2945 }
2946 }
2947 "
2948 .unindent(),
2949 }),
2950 )
2951 .await;
2952
2953 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2954 let buffer = project
2955 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2956 .await
2957 .unwrap();
2958
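    // Publish diagnostics where primary errors and their hints reference each other
    // via related information, so they can be grouped.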
2959 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2960 let message = lsp::PublishDiagnosticsParams {
2961 uri: buffer_uri.clone(),
2962 diagnostics: vec![
2963 lsp::Diagnostic {
2964 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2965 severity: Some(DiagnosticSeverity::WARNING),
2966 message: "error 1".to_string(),
2967 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2968 location: lsp::Location {
2969 uri: buffer_uri.clone(),
2970 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2971 },
2972 message: "error 1 hint 1".to_string(),
2973 }]),
2974 ..Default::default()
2975 },
2976 lsp::Diagnostic {
2977 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2978 severity: Some(DiagnosticSeverity::HINT),
2979 message: "error 1 hint 1".to_string(),
2980 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2981 location: lsp::Location {
2982 uri: buffer_uri.clone(),
2983 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2984 },
2985 message: "original diagnostic".to_string(),
2986 }]),
2987 ..Default::default()
2988 },
2989 lsp::Diagnostic {
2990 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2991 severity: Some(DiagnosticSeverity::ERROR),
2992 message: "error 2".to_string(),
2993 related_information: Some(vec![
2994 lsp::DiagnosticRelatedInformation {
2995 location: lsp::Location {
2996 uri: buffer_uri.clone(),
2997 range: lsp::Range::new(
2998 lsp::Position::new(1, 13),
2999 lsp::Position::new(1, 15),
3000 ),
3001 },
3002 message: "error 2 hint 1".to_string(),
3003 },
3004 lsp::DiagnosticRelatedInformation {
3005 location: lsp::Location {
3006 uri: buffer_uri.clone(),
3007 range: lsp::Range::new(
3008 lsp::Position::new(1, 13),
3009 lsp::Position::new(1, 15),
3010 ),
3011 },
3012 message: "error 2 hint 2".to_string(),
3013 },
3014 ]),
3015 ..Default::default()
3016 },
3017 lsp::Diagnostic {
3018 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3019 severity: Some(DiagnosticSeverity::HINT),
3020 message: "error 2 hint 1".to_string(),
3021 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3022 location: lsp::Location {
3023 uri: buffer_uri.clone(),
3024 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3025 },
3026 message: "original diagnostic".to_string(),
3027 }]),
3028 ..Default::default()
3029 },
3030 lsp::Diagnostic {
3031 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3032 severity: Some(DiagnosticSeverity::HINT),
3033 message: "error 2 hint 2".to_string(),
3034 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3035 location: lsp::Location {
3036 uri: buffer_uri,
3037 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3038 },
3039 message: "original diagnostic".to_string(),
3040 }]),
3041 ..Default::default()
3042 },
3043 ],
3044 version: None,
3045 };
3046
3047 project
3048 .update(cx, |p, cx| {
3049 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3050 })
3051 .unwrap();
3052 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3053
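    // Each primary diagnostic and the hints derived from its related information share a group id.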
3054 assert_eq!(
3055 buffer
3056 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3057 .collect::<Vec<_>>(),
3058 &[
3059 DiagnosticEntry {
3060 range: Point::new(1, 8)..Point::new(1, 9),
3061 diagnostic: Diagnostic {
3062 severity: DiagnosticSeverity::WARNING,
3063 message: "error 1".to_string(),
3064 group_id: 1,
3065 is_primary: true,
3066 ..Default::default()
3067 }
3068 },
3069 DiagnosticEntry {
3070 range: Point::new(1, 8)..Point::new(1, 9),
3071 diagnostic: Diagnostic {
3072 severity: DiagnosticSeverity::HINT,
3073 message: "error 1 hint 1".to_string(),
3074 group_id: 1,
3075 is_primary: false,
3076 ..Default::default()
3077 }
3078 },
3079 DiagnosticEntry {
3080 range: Point::new(1, 13)..Point::new(1, 15),
3081 diagnostic: Diagnostic {
3082 severity: DiagnosticSeverity::HINT,
3083 message: "error 2 hint 1".to_string(),
3084 group_id: 0,
3085 is_primary: false,
3086 ..Default::default()
3087 }
3088 },
3089 DiagnosticEntry {
3090 range: Point::new(1, 13)..Point::new(1, 15),
3091 diagnostic: Diagnostic {
3092 severity: DiagnosticSeverity::HINT,
3093 message: "error 2 hint 2".to_string(),
3094 group_id: 0,
3095 is_primary: false,
3096 ..Default::default()
3097 }
3098 },
3099 DiagnosticEntry {
3100 range: Point::new(2, 8)..Point::new(2, 17),
3101 diagnostic: Diagnostic {
3102 severity: DiagnosticSeverity::ERROR,
3103 message: "error 2".to_string(),
3104 group_id: 0,
3105 is_primary: true,
3106 ..Default::default()
3107 }
3108 }
3109 ]
3110 );
3111
3112 assert_eq!(
3113 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3114 &[
3115 DiagnosticEntry {
3116 range: Point::new(1, 13)..Point::new(1, 15),
3117 diagnostic: Diagnostic {
3118 severity: DiagnosticSeverity::HINT,
3119 message: "error 2 hint 1".to_string(),
3120 group_id: 0,
3121 is_primary: false,
3122 ..Default::default()
3123 }
3124 },
3125 DiagnosticEntry {
3126 range: Point::new(1, 13)..Point::new(1, 15),
3127 diagnostic: Diagnostic {
3128 severity: DiagnosticSeverity::HINT,
3129 message: "error 2 hint 2".to_string(),
3130 group_id: 0,
3131 is_primary: false,
3132 ..Default::default()
3133 }
3134 },
3135 DiagnosticEntry {
3136 range: Point::new(2, 8)..Point::new(2, 17),
3137 diagnostic: Diagnostic {
3138 severity: DiagnosticSeverity::ERROR,
3139 message: "error 2".to_string(),
3140 group_id: 0,
3141 is_primary: true,
3142 ..Default::default()
3143 }
3144 }
3145 ]
3146 );
3147
3148 assert_eq!(
3149 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3150 &[
3151 DiagnosticEntry {
3152 range: Point::new(1, 8)..Point::new(1, 9),
3153 diagnostic: Diagnostic {
3154 severity: DiagnosticSeverity::WARNING,
3155 message: "error 1".to_string(),
3156 group_id: 1,
3157 is_primary: true,
3158 ..Default::default()
3159 }
3160 },
3161 DiagnosticEntry {
3162 range: Point::new(1, 8)..Point::new(1, 9),
3163 diagnostic: Diagnostic {
3164 severity: DiagnosticSeverity::HINT,
3165 message: "error 1 hint 1".to_string(),
3166 group_id: 1,
3167 is_primary: false,
3168 ..Default::default()
3169 }
3170 },
3171 ]
3172 );
3173}
3174
3175#[gpui::test]
3176async fn test_rename(cx: &mut gpui::TestAppContext) {
3177 init_test(cx);
3178
3179 let mut language = Language::new(
3180 LanguageConfig {
3181 name: "Rust".into(),
3182 path_suffixes: vec!["rs".to_string()],
3183 ..Default::default()
3184 },
3185 Some(tree_sitter_rust::language()),
3186 );
3187 let mut fake_servers = language
3188 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3189 capabilities: lsp::ServerCapabilities {
3190 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3191 prepare_provider: Some(true),
3192 work_done_progress_options: Default::default(),
3193 })),
3194 ..Default::default()
3195 },
3196 ..Default::default()
3197 }))
3198 .await;
3199
3200 let fs = FakeFs::new(cx.background());
3201 fs.insert_tree(
3202 "/dir",
3203 json!({
3204 "one.rs": "const ONE: usize = 1;",
3205 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3206 }),
3207 )
3208 .await;
3209
3210 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3211 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3212 let buffer = project
3213 .update(cx, |project, cx| {
3214 project.open_local_buffer("/dir/one.rs", cx)
3215 })
3216 .await
3217 .unwrap();
3218
3219 let fake_server = fake_servers.next().await.unwrap();
3220
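    // Prepare a rename at the symbol `ONE`; the server responds with the symbol's range.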
3221 let response = project.update(cx, |project, cx| {
3222 project.prepare_rename(buffer.clone(), 7, cx)
3223 });
3224 fake_server
3225 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3226 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3227 assert_eq!(params.position, lsp::Position::new(0, 7));
3228 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3229 lsp::Position::new(0, 6),
3230 lsp::Position::new(0, 9),
3231 ))))
3232 })
3233 .next()
3234 .await
3235 .unwrap();
3236 let range = response.await.unwrap().unwrap();
3237 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3238 assert_eq!(range, 6..9);
3239
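    // Perform the rename; the server's workspace edit spans both one.rs and two.rs.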
3240 let response = project.update(cx, |project, cx| {
3241 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3242 });
3243 fake_server
3244 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3245 assert_eq!(
3246 params.text_document_position.text_document.uri.as_str(),
3247 "file:///dir/one.rs"
3248 );
3249 assert_eq!(
3250 params.text_document_position.position,
3251 lsp::Position::new(0, 7)
3252 );
3253 assert_eq!(params.new_name, "THREE");
3254 Ok(Some(lsp::WorkspaceEdit {
3255 changes: Some(
3256 [
3257 (
3258 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3259 vec![lsp::TextEdit::new(
3260 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3261 "THREE".to_string(),
3262 )],
3263 ),
3264 (
3265 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3266 vec![
3267 lsp::TextEdit::new(
3268 lsp::Range::new(
3269 lsp::Position::new(0, 24),
3270 lsp::Position::new(0, 27),
3271 ),
3272 "THREE".to_string(),
3273 ),
3274 lsp::TextEdit::new(
3275 lsp::Range::new(
3276 lsp::Position::new(0, 35),
3277 lsp::Position::new(0, 38),
3278 ),
3279 "THREE".to_string(),
3280 ),
3281 ],
3282 ),
3283 ]
3284 .into_iter()
3285 .collect(),
3286 ),
3287 ..Default::default()
3288 }))
3289 })
3290 .next()
3291 .await
3292 .unwrap();
3293 let mut transaction = response.await.unwrap().0;
3294 assert_eq!(transaction.len(), 2);
3295 assert_eq!(
3296 transaction
3297 .remove_entry(&buffer)
3298 .unwrap()
3299 .0
3300 .read_with(cx, |buffer, _| buffer.text()),
3301 "const THREE: usize = 1;"
3302 );
3303 assert_eq!(
3304 transaction
3305 .into_keys()
3306 .next()
3307 .unwrap()
3308 .read_with(cx, |buffer, _| buffer.text()),
3309 "const TWO: usize = one::THREE + one::THREE;"
3310 );
3311}
3312
3313#[gpui::test]
3314async fn test_search(cx: &mut gpui::TestAppContext) {
3315 init_test(cx);
3316
3317 let fs = FakeFs::new(cx.background());
3318 fs.insert_tree(
3319 "/dir",
3320 json!({
3321 "one.rs": "const ONE: usize = 1;",
3322 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3323 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3324 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3325 }),
3326 )
3327 .await;
3328 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3329 assert_eq!(
3330 search(
3331 &project,
3332 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3333 cx
3334 )
3335 .await
3336 .unwrap(),
3337 HashMap::from_iter([
3338 ("two.rs".to_string(), vec![6..9]),
3339 ("three.rs".to_string(), vec![37..40])
3340 ])
3341 );
3342
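    // Edit an open buffer without saving; the next search should reflect its in-memory contents.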
3343 let buffer_4 = project
3344 .update(cx, |project, cx| {
3345 project.open_local_buffer("/dir/four.rs", cx)
3346 })
3347 .await
3348 .unwrap();
3349 buffer_4.update(cx, |buffer, cx| {
3350 let text = "two::TWO";
3351 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3352 });
3353
3354 assert_eq!(
3355 search(
3356 &project,
3357 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3358 cx
3359 )
3360 .await
3361 .unwrap(),
3362 HashMap::from_iter([
3363 ("two.rs".to_string(), vec![6..9]),
3364 ("three.rs".to_string(), vec![37..40]),
3365 ("four.rs".to_string(), vec![25..28, 36..39])
3366 ])
3367 );
3368}
3369
3370#[gpui::test]
3371async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3372 init_test(cx);
3373
3374 let search_query = "file";
3375
3376 let fs = FakeFs::new(cx.background());
3377 fs.insert_tree(
3378 "/dir",
3379 json!({
3380 "one.rs": r#"// Rust file one"#,
3381 "one.ts": r#"// TypeScript file one"#,
3382 "two.rs": r#"// Rust file two"#,
3383 "two.ts": r#"// TypeScript file two"#,
3384 }),
3385 )
3386 .await;
3387 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3388
3389 assert!(
3390 search(
3391 &project,
3392 SearchQuery::text(
3393 search_query,
3394 false,
3395 true,
3396 vec![glob::Pattern::new("*.odd").unwrap()],
3397 Vec::new()
3398 ),
3399 cx
3400 )
3401 .await
3402 .unwrap()
3403 .is_empty(),
3404 "If no inclusions match, no files should be returned"
3405 );
3406
3407 assert_eq!(
3408 search(
3409 &project,
3410 SearchQuery::text(
3411 search_query,
3412 false,
3413 true,
3414 vec![glob::Pattern::new("*.rs").unwrap()],
3415 Vec::new()
3416 ),
3417 cx
3418 )
3419 .await
3420 .unwrap(),
3421 HashMap::from_iter([
3422 ("one.rs".to_string(), vec![8..12]),
3423 ("two.rs".to_string(), vec![8..12]),
3424 ]),
3425 "Rust only search should give only Rust files"
3426 );
3427
3428 assert_eq!(
3429 search(
3430 &project,
3431 SearchQuery::text(
3432 search_query,
3433 false,
3434 true,
3435 vec![
3436 glob::Pattern::new("*.ts").unwrap(),
3437 glob::Pattern::new("*.odd").unwrap(),
3438 ],
3439 Vec::new()
3440 ),
3441 cx
3442 )
3443 .await
3444 .unwrap(),
3445 HashMap::from_iter([
3446 ("one.ts".to_string(), vec![14..18]),
3447 ("two.ts".to_string(), vec![14..18]),
3448 ]),
3449 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3450 );
3451
3452 assert_eq!(
3453 search(
3454 &project,
3455 SearchQuery::text(
3456 search_query,
3457 false,
3458 true,
3459 vec![
3460 glob::Pattern::new("*.rs").unwrap(),
3461 glob::Pattern::new("*.ts").unwrap(),
3462 glob::Pattern::new("*.odd").unwrap(),
3463 ],
3464 Vec::new()
3465 ),
3466 cx
3467 )
3468 .await
3469 .unwrap(),
3470 HashMap::from_iter([
3471 ("one.rs".to_string(), vec![8..12]),
3472 ("one.ts".to_string(), vec![14..18]),
3473 ("two.rs".to_string(), vec![8..12]),
3474 ("two.ts".to_string(), vec![14..18]),
3475 ]),
3476 "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3477 );
3478}
3479
3480#[gpui::test]
3481async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3482 init_test(cx);
3483
3484 let search_query = "file";
3485
3486 let fs = FakeFs::new(cx.background());
3487 fs.insert_tree(
3488 "/dir",
3489 json!({
3490 "one.rs": r#"// Rust file one"#,
3491 "one.ts": r#"// TypeScript file one"#,
3492 "two.rs": r#"// Rust file two"#,
3493 "two.ts": r#"// TypeScript file two"#,
3494 }),
3495 )
3496 .await;
3497 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3498
3499 assert_eq!(
3500 search(
3501 &project,
3502 SearchQuery::text(
3503 search_query,
3504 false,
3505 true,
3506 Vec::new(),
3507 vec![glob::Pattern::new("*.odd").unwrap()],
3508 ),
3509 cx
3510 )
3511 .await
3512 .unwrap(),
3513 HashMap::from_iter([
3514 ("one.rs".to_string(), vec![8..12]),
3515 ("one.ts".to_string(), vec![14..18]),
3516 ("two.rs".to_string(), vec![8..12]),
3517 ("two.ts".to_string(), vec![14..18]),
3518 ]),
3519 "If no exclusions match, all files should be returned"
3520 );
3521
3522 assert_eq!(
3523 search(
3524 &project,
3525 SearchQuery::text(
3526 search_query,
3527 false,
3528 true,
3529 Vec::new(),
3530 vec![glob::Pattern::new("*.rs").unwrap()],
3531 ),
3532 cx
3533 )
3534 .await
3535 .unwrap(),
3536 HashMap::from_iter([
3537 ("one.ts".to_string(), vec![14..18]),
3538 ("two.ts".to_string(), vec![14..18]),
3539 ]),
3540 "Rust exclusion search should give only TypeScript files"
3541 );
3542
3543 assert_eq!(
3544 search(
3545 &project,
3546 SearchQuery::text(
3547 search_query,
3548 false,
3549 true,
3550 Vec::new(),
3551 vec![
3552 glob::Pattern::new("*.ts").unwrap(),
3553 glob::Pattern::new("*.odd").unwrap(),
3554 ],
3555 ),
3556 cx
3557 )
3558 .await
3559 .unwrap(),
3560 HashMap::from_iter([
3561 ("one.rs".to_string(), vec![8..12]),
3562 ("two.rs".to_string(), vec![8..12]),
3563 ]),
3564 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3565 );
3566
3567 assert!(
3568 search(
3569 &project,
3570 SearchQuery::text(
3571 search_query,
3572 false,
3573 true,
3574 Vec::new(),
3575 vec![
3576 glob::Pattern::new("*.rs").unwrap(),
3577 glob::Pattern::new("*.ts").unwrap(),
3578 glob::Pattern::new("*.odd").unwrap(),
3579 ],
3580 ),
3581 cx
3582 )
3583 .await
3584 .unwrap().is_empty(),
3585 "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3586 );
3587}
3588
3589#[gpui::test]
3590async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3591 init_test(cx);
3592
3593 let search_query = "file";
3594
3595 let fs = FakeFs::new(cx.background());
3596 fs.insert_tree(
3597 "/dir",
3598 json!({
3599 "one.rs": r#"// Rust file one"#,
3600 "one.ts": r#"// TypeScript file one"#,
3601 "two.rs": r#"// Rust file two"#,
3602 "two.ts": r#"// TypeScript file two"#,
3603 }),
3604 )
3605 .await;
3606 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3607
3608 assert!(
3609 search(
3610 &project,
3611 SearchQuery::text(
3612 search_query,
3613 false,
3614 true,
3615 vec![glob::Pattern::new("*.odd").unwrap()],
3616 vec![glob::Pattern::new("*.odd").unwrap()],
3617 ),
3618 cx
3619 )
3620 .await
3621 .unwrap()
3622 .is_empty(),
3623 "If neither inclusions nor exclusions match anything, no files should be returned"
3624 );
3625
3626 assert!(
3627 search(
3628 &project,
3629 SearchQuery::text(
3630 search_query,
3631 false,
3632 true,
3633 vec![glob::Pattern::new("*.ts").unwrap()],
3634 vec![glob::Pattern::new("*.ts").unwrap()],
3635 ),
3636 cx
3637 )
3638 .await
3639 .unwrap()
3640 .is_empty(),
3641 "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned."
3642 );
3643
3644 assert!(
3645 search(
3646 &project,
3647 SearchQuery::text(
3648 search_query,
3649 false,
3650 true,
3651 vec![
3652 glob::Pattern::new("*.ts").unwrap(),
3653 glob::Pattern::new("*.odd").unwrap()
3654 ],
3655 vec![
3656 glob::Pattern::new("*.ts").unwrap(),
3657 glob::Pattern::new("*.odd").unwrap()
3658 ],
3659 ),
3660 cx
3661 )
3662 .await
3663 .unwrap()
3664 .is_empty(),
3665 "Non-matching inclusions and exclusions should not change that."
3666 );
3667
3668 assert_eq!(
3669 search(
3670 &project,
3671 SearchQuery::text(
3672 search_query,
3673 false,
3674 true,
3675 vec![
3676 glob::Pattern::new("*.ts").unwrap(),
3677 glob::Pattern::new("*.odd").unwrap()
3678 ],
3679 vec![
3680 glob::Pattern::new("*.rs").unwrap(),
3681 glob::Pattern::new("*.odd").unwrap()
3682 ],
3683 ),
3684 cx
3685 )
3686 .await
3687 .unwrap(),
3688 HashMap::from_iter([
3689 ("one.ts".to_string(), vec![14..18]),
3690 ("two.ts".to_string(), vec![14..18]),
3691 ]),
3692 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3693 );
3694}
3695
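// Runs a project-wide search and returns the matching offset ranges keyed by file path.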
3696async fn search(
3697 project: &ModelHandle<Project>,
3698 query: SearchQuery,
3699 cx: &mut gpui::TestAppContext,
3700) -> Result<HashMap<String, Vec<Range<usize>>>> {
3701 let results = project
3702 .update(cx, |project, cx| project.search(query, cx))
3703 .await?;
3704
3705 Ok(results
3706 .into_iter()
3707 .map(|(buffer, ranges)| {
3708 buffer.read_with(cx, |buffer, _| {
3709 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3710 let ranges = ranges
3711 .into_iter()
3712 .map(|range| range.to_offset(buffer))
3713 .collect::<Vec<_>>();
3714 (path, ranges)
3715 })
3716 })
3717 .collect())
3718}
3719
3720fn init_test(cx: &mut gpui::TestAppContext) {
3721 cx.foreground().forbid_parking();
3722
3723 cx.update(|cx| {
3724 cx.set_global(SettingsStore::test(cx));
3725 language::init(cx);
3726 Project::init_settings(cx);
3727 });
3728}