1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use globset::Glob;
5use gpui::{executor::Deterministic, test::subscribe, AppContext};
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 init_test(cx);
30 cx.foreground().allow_parking();
31
32 let dir = temp_tree(json!({
33 "root": {
34 "apple": "",
35 "banana": {
36 "carrot": {
37 "date": "",
38 "endive": "",
39 }
40 },
41 "fennel": {
42 "grape": "",
43 }
44 }
45 }));
46
47 let root_link_path = dir.path().join("root_link");
48 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
49 unix::fs::symlink(
50 &dir.path().join("root/fennel"),
51 &dir.path().join("root/finnochio"),
52 )
53 .unwrap();
54
55 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
56 project.read_with(cx, |project, cx| {
57 let tree = project.worktrees(cx).next().unwrap().read(cx);
58 assert_eq!(tree.file_count(), 5);
59 assert_eq!(
60 tree.inode_for_path("fennel/grape"),
61 tree.inode_for_path("finnochio/grape")
62 );
63 });
64}
65
66#[gpui::test]
67async fn test_managing_language_servers(
68 deterministic: Arc<Deterministic>,
69 cx: &mut gpui::TestAppContext,
70) {
71 init_test(cx);
72
73 let mut rust_language = Language::new(
74 LanguageConfig {
75 name: "Rust".into(),
76 path_suffixes: vec!["rs".to_string()],
77 ..Default::default()
78 },
79 Some(tree_sitter_rust::language()),
80 );
81 let mut json_language = Language::new(
82 LanguageConfig {
83 name: "JSON".into(),
84 path_suffixes: vec!["json".to_string()],
85 ..Default::default()
86 },
87 None,
88 );
89 let mut fake_rust_servers = rust_language
90 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
91 name: "the-rust-language-server",
92 capabilities: lsp::ServerCapabilities {
93 completion_provider: Some(lsp::CompletionOptions {
94 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
95 ..Default::default()
96 }),
97 ..Default::default()
98 },
99 ..Default::default()
100 }))
101 .await;
102 let mut fake_json_servers = json_language
103 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
104 name: "the-json-language-server",
105 capabilities: lsp::ServerCapabilities {
106 completion_provider: Some(lsp::CompletionOptions {
107 trigger_characters: Some(vec![":".to_string()]),
108 ..Default::default()
109 }),
110 ..Default::default()
111 },
112 ..Default::default()
113 }))
114 .await;
115
116 let fs = FakeFs::new(cx.background());
117 fs.insert_tree(
118 "/the-root",
119 json!({
120 "test.rs": "const A: i32 = 1;",
121 "test2.rs": "",
122 "Cargo.toml": "a = 1",
123 "package.json": "{\"a\": 1}",
124 }),
125 )
126 .await;
127
128 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
129
130 // Open a buffer without an associated language server.
131 let toml_buffer = project
132 .update(cx, |project, cx| {
133 project.open_local_buffer("/the-root/Cargo.toml", cx)
134 })
135 .await
136 .unwrap();
137
138 // Open a buffer with an associated language server before the language for it has been loaded.
139 let rust_buffer = project
140 .update(cx, |project, cx| {
141 project.open_local_buffer("/the-root/test.rs", cx)
142 })
143 .await
144 .unwrap();
145 rust_buffer.read_with(cx, |buffer, _| {
146 assert_eq!(buffer.language().map(|l| l.name()), None);
147 });
148
149 // Now we add the languages to the project, and ensure they get assigned to all
150 // the relevant open buffers.
151 project.update(cx, |project, _| {
152 project.languages.add(Arc::new(json_language));
153 project.languages.add(Arc::new(rust_language));
154 });
155 deterministic.run_until_parked();
156 rust_buffer.read_with(cx, |buffer, _| {
157 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
158 });
159
160 // A server is started up, and it is notified about Rust files.
161 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
162 assert_eq!(
163 fake_rust_server
164 .receive_notification::<lsp::notification::DidOpenTextDocument>()
165 .await
166 .text_document,
167 lsp::TextDocumentItem {
168 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
169 version: 0,
170 text: "const A: i32 = 1;".to_string(),
171 language_id: Default::default()
172 }
173 );
174
175 // The buffer is configured based on the language server's capabilities.
176 rust_buffer.read_with(cx, |buffer, _| {
177 assert_eq!(
178 buffer.completion_triggers(),
179 &[".".to_string(), "::".to_string()]
180 );
181 });
182 toml_buffer.read_with(cx, |buffer, _| {
183 assert!(buffer.completion_triggers().is_empty());
184 });
185
186 // Edit a buffer. The changes are reported to the language server.
187 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
188 assert_eq!(
189 fake_rust_server
190 .receive_notification::<lsp::notification::DidChangeTextDocument>()
191 .await
192 .text_document,
193 lsp::VersionedTextDocumentIdentifier::new(
194 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
195 1
196 )
197 );
198
199 // Open a third buffer with a different associated language server.
200 let json_buffer = project
201 .update(cx, |project, cx| {
202 project.open_local_buffer("/the-root/package.json", cx)
203 })
204 .await
205 .unwrap();
206
207 // A json language server is started up and is only notified about the json buffer.
208 let mut fake_json_server = fake_json_servers.next().await.unwrap();
209 assert_eq!(
210 fake_json_server
211 .receive_notification::<lsp::notification::DidOpenTextDocument>()
212 .await
213 .text_document,
214 lsp::TextDocumentItem {
215 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
216 version: 0,
217 text: "{\"a\": 1}".to_string(),
218 language_id: Default::default()
219 }
220 );
221
222 // This buffer is configured based on the second language server's
223 // capabilities.
224 json_buffer.read_with(cx, |buffer, _| {
225 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
226 });
227
228 // When opening another buffer whose language server is already running,
229 // it is also configured based on the existing language server's capabilities.
230 let rust_buffer2 = project
231 .update(cx, |project, cx| {
232 project.open_local_buffer("/the-root/test2.rs", cx)
233 })
234 .await
235 .unwrap();
236 rust_buffer2.read_with(cx, |buffer, _| {
237 assert_eq!(
238 buffer.completion_triggers(),
239 &[".".to_string(), "::".to_string()]
240 );
241 });
242
243 // Changes are reported only to servers matching the buffer's language.
244 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
245 rust_buffer2.update(cx, |buffer, cx| {
246 buffer.edit([(0..0, "let x = 1;")], None, cx)
247 });
248 assert_eq!(
249 fake_rust_server
250 .receive_notification::<lsp::notification::DidChangeTextDocument>()
251 .await
252 .text_document,
253 lsp::VersionedTextDocumentIdentifier::new(
254 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
255 1
256 )
257 );
258
259 // Save notifications are reported to all servers.
260 project
261 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
262 .await
263 .unwrap();
264 assert_eq!(
265 fake_rust_server
266 .receive_notification::<lsp::notification::DidSaveTextDocument>()
267 .await
268 .text_document,
269 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
270 );
271 assert_eq!(
272 fake_json_server
273 .receive_notification::<lsp::notification::DidSaveTextDocument>()
274 .await
275 .text_document,
276 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
277 );
278
279 // Renames are reported only to servers matching the buffer's language.
280 fs.rename(
281 Path::new("/the-root/test2.rs"),
282 Path::new("/the-root/test3.rs"),
283 Default::default(),
284 )
285 .await
286 .unwrap();
287 assert_eq!(
288 fake_rust_server
289 .receive_notification::<lsp::notification::DidCloseTextDocument>()
290 .await
291 .text_document,
292 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
293 );
294 assert_eq!(
295 fake_rust_server
296 .receive_notification::<lsp::notification::DidOpenTextDocument>()
297 .await
298 .text_document,
299 lsp::TextDocumentItem {
300 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
301 version: 0,
302 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
303 language_id: Default::default()
304 },
305 );
306
307 rust_buffer2.update(cx, |buffer, cx| {
308 buffer.update_diagnostics(
309 LanguageServerId(0),
310 DiagnosticSet::from_sorted_entries(
311 vec![DiagnosticEntry {
312 diagnostic: Default::default(),
313 range: Anchor::MIN..Anchor::MAX,
314 }],
315 &buffer.snapshot(),
316 ),
317 cx,
318 );
319 assert_eq!(
320 buffer
321 .snapshot()
322 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
323 .count(),
324 1
325 );
326 });
327
328 // When the rename changes the extension of the file, the buffer gets closed on the old
329 // language server and gets opened on the new one.
330 fs.rename(
331 Path::new("/the-root/test3.rs"),
332 Path::new("/the-root/test3.json"),
333 Default::default(),
334 )
335 .await
336 .unwrap();
337 assert_eq!(
338 fake_rust_server
339 .receive_notification::<lsp::notification::DidCloseTextDocument>()
340 .await
341 .text_document,
342 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
343 );
344 assert_eq!(
345 fake_json_server
346 .receive_notification::<lsp::notification::DidOpenTextDocument>()
347 .await
348 .text_document,
349 lsp::TextDocumentItem {
350 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
351 version: 0,
352 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
353 language_id: Default::default()
354 },
355 );
356
357 // We clear the diagnostics, since the language has changed.
358 rust_buffer2.read_with(cx, |buffer, _| {
359 assert_eq!(
360 buffer
361 .snapshot()
362 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
363 .count(),
364 0
365 );
366 });
367
368 // The renamed file's version resets after changing language server.
369 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
370 assert_eq!(
371 fake_json_server
372 .receive_notification::<lsp::notification::DidChangeTextDocument>()
373 .await
374 .text_document,
375 lsp::VersionedTextDocumentIdentifier::new(
376 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
377 1
378 )
379 );
380
381 // Restart language servers
382 project.update(cx, |project, cx| {
383 project.restart_language_servers_for_buffers(
384 vec![rust_buffer.clone(), json_buffer.clone()],
385 cx,
386 );
387 });
388
389 let mut rust_shutdown_requests = fake_rust_server
390 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
391 let mut json_shutdown_requests = fake_json_server
392 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
393 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
394
395 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
396 let mut fake_json_server = fake_json_servers.next().await.unwrap();
397
398 // Ensure rust document is reopened in new rust language server
399 assert_eq!(
400 fake_rust_server
401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
402 .await
403 .text_document,
404 lsp::TextDocumentItem {
405 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
406 version: 0,
407 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
408 language_id: Default::default()
409 }
410 );
411
412 // Ensure json documents are reopened in new json language server
413 assert_set_eq!(
414 [
415 fake_json_server
416 .receive_notification::<lsp::notification::DidOpenTextDocument>()
417 .await
418 .text_document,
419 fake_json_server
420 .receive_notification::<lsp::notification::DidOpenTextDocument>()
421 .await
422 .text_document,
423 ],
424 [
425 lsp::TextDocumentItem {
426 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
427 version: 0,
428 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
429 language_id: Default::default()
430 },
431 lsp::TextDocumentItem {
432 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
433 version: 0,
434 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
435 language_id: Default::default()
436 }
437 ]
438 );
439
440 // Close notifications are reported only to servers matching the buffer's language.
441 cx.update(|_| drop(json_buffer));
442 let close_message = lsp::DidCloseTextDocumentParams {
443 text_document: lsp::TextDocumentIdentifier::new(
444 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
445 ),
446 };
447 assert_eq!(
448 fake_json_server
449 .receive_notification::<lsp::notification::DidCloseTextDocument>()
450 .await,
451 close_message,
452 );
453}
454
455#[gpui::test]
456async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
457 init_test(cx);
458
459 let mut language = Language::new(
460 LanguageConfig {
461 name: "Rust".into(),
462 path_suffixes: vec!["rs".to_string()],
463 ..Default::default()
464 },
465 Some(tree_sitter_rust::language()),
466 );
467 let mut fake_servers = language
468 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
469 name: "the-language-server",
470 ..Default::default()
471 }))
472 .await;
473
474 let fs = FakeFs::new(cx.background());
475 fs.insert_tree(
476 "/the-root",
477 json!({
478 "a.rs": "",
479 "b.rs": "",
480 }),
481 )
482 .await;
483
484 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
485 project.update(cx, |project, _| {
486 project.languages.add(Arc::new(language));
487 });
488 cx.foreground().run_until_parked();
489
490 // Start the language server by opening a buffer with a compatible file extension.
491 let _buffer = project
492 .update(cx, |project, cx| {
493 project.open_local_buffer("/the-root/a.rs", cx)
494 })
495 .await
496 .unwrap();
497
498 // Keep track of the FS events reported to the language server.
499 let fake_server = fake_servers.next().await.unwrap();
500 let file_changes = Arc::new(Mutex::new(Vec::new()));
501 fake_server
502 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
503 registrations: vec![lsp::Registration {
504 id: Default::default(),
505 method: "workspace/didChangeWatchedFiles".to_string(),
506 register_options: serde_json::to_value(
507 lsp::DidChangeWatchedFilesRegistrationOptions {
508 watchers: vec![lsp::FileSystemWatcher {
509 glob_pattern: "/the-root/*.{rs,c}".to_string(),
510 kind: None,
511 }],
512 },
513 )
514 .ok(),
515 }],
516 })
517 .await
518 .unwrap();
519 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
520 let file_changes = file_changes.clone();
521 move |params, _| {
522 let mut file_changes = file_changes.lock();
523 file_changes.extend(params.changes);
524 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
525 }
526 });
527
528 cx.foreground().run_until_parked();
529 assert_eq!(file_changes.lock().len(), 0);
530
531 // Perform some file system mutations, two of which match the watched patterns,
532 // and one of which does not.
533 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
534 .await
535 .unwrap();
536 fs.create_file("/the-root/d.txt".as_ref(), Default::default())
537 .await
538 .unwrap();
539 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
540 .await
541 .unwrap();
542
543 // The language server receives events for the FS mutations that match its watch patterns.
544 cx.foreground().run_until_parked();
545 assert_eq!(
546 &*file_changes.lock(),
547 &[
548 lsp::FileEvent {
549 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
550 typ: lsp::FileChangeType::DELETED,
551 },
552 lsp::FileEvent {
553 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
554 typ: lsp::FileChangeType::CREATED,
555 },
556 ]
557 );
558}
559
560#[gpui::test]
561async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
562 init_test(cx);
563
564 let fs = FakeFs::new(cx.background());
565 fs.insert_tree(
566 "/dir",
567 json!({
568 "a.rs": "let a = 1;",
569 "b.rs": "let b = 2;"
570 }),
571 )
572 .await;
573
574 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
575
576 let buffer_a = project
577 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
578 .await
579 .unwrap();
580 let buffer_b = project
581 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
582 .await
583 .unwrap();
584
585 project.update(cx, |project, cx| {
586 project
587 .update_diagnostics(
588 LanguageServerId(0),
589 lsp::PublishDiagnosticsParams {
590 uri: Url::from_file_path("/dir/a.rs").unwrap(),
591 version: None,
592 diagnostics: vec![lsp::Diagnostic {
593 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
594 severity: Some(lsp::DiagnosticSeverity::ERROR),
595 message: "error 1".to_string(),
596 ..Default::default()
597 }],
598 },
599 &[],
600 cx,
601 )
602 .unwrap();
603 project
604 .update_diagnostics(
605 LanguageServerId(0),
606 lsp::PublishDiagnosticsParams {
607 uri: Url::from_file_path("/dir/b.rs").unwrap(),
608 version: None,
609 diagnostics: vec![lsp::Diagnostic {
610 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
611 severity: Some(lsp::DiagnosticSeverity::WARNING),
612 message: "error 2".to_string(),
613 ..Default::default()
614 }],
615 },
616 &[],
617 cx,
618 )
619 .unwrap();
620 });
621
622 buffer_a.read_with(cx, |buffer, _| {
623 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
624 assert_eq!(
625 chunks
626 .iter()
627 .map(|(s, d)| (s.as_str(), *d))
628 .collect::<Vec<_>>(),
629 &[
630 ("let ", None),
631 ("a", Some(DiagnosticSeverity::ERROR)),
632 (" = 1;", None),
633 ]
634 );
635 });
636 buffer_b.read_with(cx, |buffer, _| {
637 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
638 assert_eq!(
639 chunks
640 .iter()
641 .map(|(s, d)| (s.as_str(), *d))
642 .collect::<Vec<_>>(),
643 &[
644 ("let ", None),
645 ("b", Some(DiagnosticSeverity::WARNING)),
646 (" = 2;", None),
647 ]
648 );
649 });
650}
651
652#[gpui::test]
653async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
654 init_test(cx);
655
656 let fs = FakeFs::new(cx.background());
657 fs.insert_tree(
658 "/root",
659 json!({
660 "dir": {
661 "a.rs": "let a = 1;",
662 },
663 "other.rs": "let b = c;"
664 }),
665 )
666 .await;
667
668 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
669
670 let (worktree, _) = project
671 .update(cx, |project, cx| {
672 project.find_or_create_local_worktree("/root/other.rs", false, cx)
673 })
674 .await
675 .unwrap();
676 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
677
678 project.update(cx, |project, cx| {
679 project
680 .update_diagnostics(
681 LanguageServerId(0),
682 lsp::PublishDiagnosticsParams {
683 uri: Url::from_file_path("/root/other.rs").unwrap(),
684 version: None,
685 diagnostics: vec![lsp::Diagnostic {
686 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
687 severity: Some(lsp::DiagnosticSeverity::ERROR),
688 message: "unknown variable 'c'".to_string(),
689 ..Default::default()
690 }],
691 },
692 &[],
693 cx,
694 )
695 .unwrap();
696 });
697
698 let buffer = project
699 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
700 .await
701 .unwrap();
702 buffer.read_with(cx, |buffer, _| {
703 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
704 assert_eq!(
705 chunks
706 .iter()
707 .map(|(s, d)| (s.as_str(), *d))
708 .collect::<Vec<_>>(),
709 &[
710 ("let b = ", None),
711 ("c", Some(DiagnosticSeverity::ERROR)),
712 (";", None),
713 ]
714 );
715 });
716
717 project.read_with(cx, |project, cx| {
718 assert_eq!(project.diagnostic_summaries(cx).next(), None);
719 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
720 });
721}
722
723#[gpui::test]
724async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
725 init_test(cx);
726
727 let progress_token = "the-progress-token";
728 let mut language = Language::new(
729 LanguageConfig {
730 name: "Rust".into(),
731 path_suffixes: vec!["rs".to_string()],
732 ..Default::default()
733 },
734 Some(tree_sitter_rust::language()),
735 );
736 let mut fake_servers = language
737 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
738 disk_based_diagnostics_progress_token: Some(progress_token.into()),
739 disk_based_diagnostics_sources: vec!["disk".into()],
740 ..Default::default()
741 }))
742 .await;
743
744 let fs = FakeFs::new(cx.background());
745 fs.insert_tree(
746 "/dir",
747 json!({
748 "a.rs": "fn a() { A }",
749 "b.rs": "const y: i32 = 1",
750 }),
751 )
752 .await;
753
754 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
755 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
756 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
757
758 // Cause worktree to start the fake language server
759 let _buffer = project
760 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
761 .await
762 .unwrap();
763
764 let mut events = subscribe(&project, cx);
765
766 let fake_server = fake_servers.next().await.unwrap();
767 fake_server
768 .start_progress(format!("{}/0", progress_token))
769 .await;
770 assert_eq!(
771 events.next().await.unwrap(),
772 Event::DiskBasedDiagnosticsStarted {
773 language_server_id: LanguageServerId(0),
774 }
775 );
776
777 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
778 uri: Url::from_file_path("/dir/a.rs").unwrap(),
779 version: None,
780 diagnostics: vec![lsp::Diagnostic {
781 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
782 severity: Some(lsp::DiagnosticSeverity::ERROR),
783 message: "undefined variable 'A'".to_string(),
784 ..Default::default()
785 }],
786 });
787 assert_eq!(
788 events.next().await.unwrap(),
789 Event::DiagnosticsUpdated {
790 language_server_id: LanguageServerId(0),
791 path: (worktree_id, Path::new("a.rs")).into()
792 }
793 );
794
795 fake_server.end_progress(format!("{}/0", progress_token));
796 assert_eq!(
797 events.next().await.unwrap(),
798 Event::DiskBasedDiagnosticsFinished {
799 language_server_id: LanguageServerId(0)
800 }
801 );
802
803 let buffer = project
804 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
805 .await
806 .unwrap();
807
808 buffer.read_with(cx, |buffer, _| {
809 let snapshot = buffer.snapshot();
810 let diagnostics = snapshot
811 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
812 .collect::<Vec<_>>();
813 assert_eq!(
814 diagnostics,
815 &[DiagnosticEntry {
816 range: Point::new(0, 9)..Point::new(0, 10),
817 diagnostic: Diagnostic {
818 severity: lsp::DiagnosticSeverity::ERROR,
819 message: "undefined variable 'A'".to_string(),
820 group_id: 0,
821 is_primary: true,
822 ..Default::default()
823 }
824 }]
825 )
826 });
827
828 // Ensure publishing empty diagnostics twice only results in one update event.
829 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
830 uri: Url::from_file_path("/dir/a.rs").unwrap(),
831 version: None,
832 diagnostics: Default::default(),
833 });
834 assert_eq!(
835 events.next().await.unwrap(),
836 Event::DiagnosticsUpdated {
837 language_server_id: LanguageServerId(0),
838 path: (worktree_id, Path::new("a.rs")).into()
839 }
840 );
841
842 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
843 uri: Url::from_file_path("/dir/a.rs").unwrap(),
844 version: None,
845 diagnostics: Default::default(),
846 });
847 cx.foreground().run_until_parked();
848 assert_eq!(futures::poll!(events.next()), Poll::Pending);
849}
850
851#[gpui::test]
852async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
853 init_test(cx);
854
855 let progress_token = "the-progress-token";
856 let mut language = Language::new(
857 LanguageConfig {
858 path_suffixes: vec!["rs".to_string()],
859 ..Default::default()
860 },
861 None,
862 );
863 let mut fake_servers = language
864 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
865 disk_based_diagnostics_sources: vec!["disk".into()],
866 disk_based_diagnostics_progress_token: Some(progress_token.into()),
867 ..Default::default()
868 }))
869 .await;
870
871 let fs = FakeFs::new(cx.background());
872 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
873
874 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
875 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
876
877 let buffer = project
878 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
879 .await
880 .unwrap();
881
882 // Simulate diagnostics starting to update.
883 let fake_server = fake_servers.next().await.unwrap();
884 fake_server.start_progress(progress_token).await;
885
886 // Restart the server before the diagnostics finish updating.
887 project.update(cx, |project, cx| {
888 project.restart_language_servers_for_buffers([buffer], cx);
889 });
890 let mut events = subscribe(&project, cx);
891
892 // Simulate the newly started server sending more diagnostics.
893 let fake_server = fake_servers.next().await.unwrap();
894 fake_server.start_progress(progress_token).await;
895 assert_eq!(
896 events.next().await.unwrap(),
897 Event::DiskBasedDiagnosticsStarted {
898 language_server_id: LanguageServerId(1)
899 }
900 );
901 project.read_with(cx, |project, _| {
902 assert_eq!(
903 project
904 .language_servers_running_disk_based_diagnostics()
905 .collect::<Vec<_>>(),
906 [LanguageServerId(1)]
907 );
908 });
909
910 // All diagnostics are considered done, despite the old server's diagnostic
911 // task never completing.
912 fake_server.end_progress(progress_token);
913 assert_eq!(
914 events.next().await.unwrap(),
915 Event::DiskBasedDiagnosticsFinished {
916 language_server_id: LanguageServerId(1)
917 }
918 );
919 project.read_with(cx, |project, _| {
920 assert_eq!(
921 project
922 .language_servers_running_disk_based_diagnostics()
923 .collect::<Vec<_>>(),
924 [LanguageServerId(0); 0]
925 );
926 });
927}
928
929#[gpui::test]
930async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
931 init_test(cx);
932
933 let mut language = Language::new(
934 LanguageConfig {
935 path_suffixes: vec!["rs".to_string()],
936 ..Default::default()
937 },
938 None,
939 );
940 let mut fake_servers = language
941 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
942 name: "the-lsp",
943 ..Default::default()
944 }))
945 .await;
946
947 let fs = FakeFs::new(cx.background());
948 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
949
950 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
951 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
952
953 let buffer = project
954 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
955 .await
956 .unwrap();
957
958 // Before restarting the server, report diagnostics with an unknown buffer version.
959 let fake_server = fake_servers.next().await.unwrap();
960 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
961 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
962 version: Some(10000),
963 diagnostics: Vec::new(),
964 });
965 cx.foreground().run_until_parked();
966
967 project.update(cx, |project, cx| {
968 project.restart_language_servers_for_buffers([buffer.clone()], cx);
969 });
970 let mut fake_server = fake_servers.next().await.unwrap();
971 let notification = fake_server
972 .receive_notification::<lsp::notification::DidOpenTextDocument>()
973 .await
974 .text_document;
975 assert_eq!(notification.version, 0);
976}
977
978#[gpui::test]
979async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
980 init_test(cx);
981
982 let mut rust = Language::new(
983 LanguageConfig {
984 name: Arc::from("Rust"),
985 path_suffixes: vec!["rs".to_string()],
986 ..Default::default()
987 },
988 None,
989 );
990 let mut fake_rust_servers = rust
991 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
992 name: "rust-lsp",
993 ..Default::default()
994 }))
995 .await;
996 let mut js = Language::new(
997 LanguageConfig {
998 name: Arc::from("JavaScript"),
999 path_suffixes: vec!["js".to_string()],
1000 ..Default::default()
1001 },
1002 None,
1003 );
1004 let mut fake_js_servers = js
1005 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1006 name: "js-lsp",
1007 ..Default::default()
1008 }))
1009 .await;
1010
1011 let fs = FakeFs::new(cx.background());
1012 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1013 .await;
1014
1015 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1016 project.update(cx, |project, _| {
1017 project.languages.add(Arc::new(rust));
1018 project.languages.add(Arc::new(js));
1019 });
1020
1021 let _rs_buffer = project
1022 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1023 .await
1024 .unwrap();
1025 let _js_buffer = project
1026 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1027 .await
1028 .unwrap();
1029
1030 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1031 assert_eq!(
1032 fake_rust_server_1
1033 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1034 .await
1035 .text_document
1036 .uri
1037 .as_str(),
1038 "file:///dir/a.rs"
1039 );
1040
1041 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1042 assert_eq!(
1043 fake_js_server
1044 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1045 .await
1046 .text_document
1047 .uri
1048 .as_str(),
1049 "file:///dir/b.js"
1050 );
1051
1052 // Disable Rust language server, ensuring only that server gets stopped.
1053 cx.update(|cx| {
1054 cx.update_global(|settings: &mut SettingsStore, cx| {
1055 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1056 settings.languages.insert(
1057 Arc::from("Rust"),
1058 LanguageSettingsContent {
1059 enable_language_server: Some(false),
1060 ..Default::default()
1061 },
1062 );
1063 });
1064 })
1065 });
1066 fake_rust_server_1
1067 .receive_notification::<lsp::notification::Exit>()
1068 .await;
1069
1070 // Enable Rust and disable JavaScript language servers, ensuring that the
1071 // former gets started again and that the latter stops.
1072 cx.update(|cx| {
1073 cx.update_global(|settings: &mut SettingsStore, cx| {
1074 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1075 settings.languages.insert(
1076 Arc::from("Rust"),
1077 LanguageSettingsContent {
1078 enable_language_server: Some(true),
1079 ..Default::default()
1080 },
1081 );
1082 settings.languages.insert(
1083 Arc::from("JavaScript"),
1084 LanguageSettingsContent {
1085 enable_language_server: Some(false),
1086 ..Default::default()
1087 },
1088 );
1089 });
1090 })
1091 });
1092 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1093 assert_eq!(
1094 fake_rust_server_2
1095 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1096 .await
1097 .text_document
1098 .uri
1099 .as_str(),
1100 "file:///dir/a.rs"
1101 );
1102 fake_js_server
1103 .receive_notification::<lsp::notification::Exit>()
1104 .await;
1105}
1106
1107#[gpui::test]
1108async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1109 init_test(cx);
1110
1111 let mut language = Language::new(
1112 LanguageConfig {
1113 name: "Rust".into(),
1114 path_suffixes: vec!["rs".to_string()],
1115 ..Default::default()
1116 },
1117 Some(tree_sitter_rust::language()),
1118 );
1119 let mut fake_servers = language
1120 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1121 disk_based_diagnostics_sources: vec!["disk".into()],
1122 ..Default::default()
1123 }))
1124 .await;
1125
1126 let text = "
1127 fn a() { A }
1128 fn b() { BB }
1129 fn c() { CCC }
1130 "
1131 .unindent();
1132
1133 let fs = FakeFs::new(cx.background());
1134 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1135
1136 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1137 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1138
1139 let buffer = project
1140 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1141 .await
1142 .unwrap();
1143
1144 let mut fake_server = fake_servers.next().await.unwrap();
1145 let open_notification = fake_server
1146 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1147 .await;
1148
1149 // Edit the buffer, moving the content down
1150 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1151 let change_notification_1 = fake_server
1152 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1153 .await;
1154 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1155
1156 // Report some diagnostics for the initial version of the buffer
1157 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1158 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1159 version: Some(open_notification.text_document.version),
1160 diagnostics: vec![
1161 lsp::Diagnostic {
1162 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1163 severity: Some(DiagnosticSeverity::ERROR),
1164 message: "undefined variable 'A'".to_string(),
1165 source: Some("disk".to_string()),
1166 ..Default::default()
1167 },
1168 lsp::Diagnostic {
1169 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1170 severity: Some(DiagnosticSeverity::ERROR),
1171 message: "undefined variable 'BB'".to_string(),
1172 source: Some("disk".to_string()),
1173 ..Default::default()
1174 },
1175 lsp::Diagnostic {
1176 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1177 severity: Some(DiagnosticSeverity::ERROR),
1178 source: Some("disk".to_string()),
1179 message: "undefined variable 'CCC'".to_string(),
1180 ..Default::default()
1181 },
1182 ],
1183 });
1184
1185 // The diagnostics have moved down since they were created.
1186 buffer.next_notification(cx).await;
1187 buffer.read_with(cx, |buffer, _| {
1188 assert_eq!(
1189 buffer
1190 .snapshot()
1191 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1192 .collect::<Vec<_>>(),
1193 &[
1194 DiagnosticEntry {
1195 range: Point::new(3, 9)..Point::new(3, 11),
1196 diagnostic: Diagnostic {
1197 source: Some("disk".into()),
1198 severity: DiagnosticSeverity::ERROR,
1199 message: "undefined variable 'BB'".to_string(),
1200 is_disk_based: true,
1201 group_id: 1,
1202 is_primary: true,
1203 ..Default::default()
1204 },
1205 },
1206 DiagnosticEntry {
1207 range: Point::new(4, 9)..Point::new(4, 12),
1208 diagnostic: Diagnostic {
1209 source: Some("disk".into()),
1210 severity: DiagnosticSeverity::ERROR,
1211 message: "undefined variable 'CCC'".to_string(),
1212 is_disk_based: true,
1213 group_id: 2,
1214 is_primary: true,
1215 ..Default::default()
1216 }
1217 }
1218 ]
1219 );
1220 assert_eq!(
1221 chunks_with_diagnostics(buffer, 0..buffer.len()),
1222 [
1223 ("\n\nfn a() { ".to_string(), None),
1224 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1225 (" }\nfn b() { ".to_string(), None),
1226 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1227 (" }\nfn c() { ".to_string(), None),
1228 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1229 (" }\n".to_string(), None),
1230 ]
1231 );
1232 assert_eq!(
1233 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1234 [
1235 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1236 (" }\nfn c() { ".to_string(), None),
1237 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1238 ]
1239 );
1240 });
1241
1242 // Ensure overlapping diagnostics are highlighted correctly.
1243 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1244 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1245 version: Some(open_notification.text_document.version),
1246 diagnostics: vec![
1247 lsp::Diagnostic {
1248 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1249 severity: Some(DiagnosticSeverity::ERROR),
1250 message: "undefined variable 'A'".to_string(),
1251 source: Some("disk".to_string()),
1252 ..Default::default()
1253 },
1254 lsp::Diagnostic {
1255 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1256 severity: Some(DiagnosticSeverity::WARNING),
1257 message: "unreachable statement".to_string(),
1258 source: Some("disk".to_string()),
1259 ..Default::default()
1260 },
1261 ],
1262 });
1263
1264 buffer.next_notification(cx).await;
1265 buffer.read_with(cx, |buffer, _| {
1266 assert_eq!(
1267 buffer
1268 .snapshot()
1269 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1270 .collect::<Vec<_>>(),
1271 &[
1272 DiagnosticEntry {
1273 range: Point::new(2, 9)..Point::new(2, 12),
1274 diagnostic: Diagnostic {
1275 source: Some("disk".into()),
1276 severity: DiagnosticSeverity::WARNING,
1277 message: "unreachable statement".to_string(),
1278 is_disk_based: true,
1279 group_id: 4,
1280 is_primary: true,
1281 ..Default::default()
1282 }
1283 },
1284 DiagnosticEntry {
1285 range: Point::new(2, 9)..Point::new(2, 10),
1286 diagnostic: Diagnostic {
1287 source: Some("disk".into()),
1288 severity: DiagnosticSeverity::ERROR,
1289 message: "undefined variable 'A'".to_string(),
1290 is_disk_based: true,
1291 group_id: 3,
1292 is_primary: true,
1293 ..Default::default()
1294 },
1295 }
1296 ]
1297 );
1298 assert_eq!(
1299 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1300 [
1301 ("fn a() { ".to_string(), None),
1302 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1303 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1304 ("\n".to_string(), None),
1305 ]
1306 );
1307 assert_eq!(
1308 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1309 [
1310 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1311 ("\n".to_string(), None),
1312 ]
1313 );
1314 });
1315
1316 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1317 // changes since the last save.
1318 buffer.update(cx, |buffer, cx| {
1319 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1320 buffer.edit(
1321 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1322 None,
1323 cx,
1324 );
1325 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1326 });
1327 let change_notification_2 = fake_server
1328 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1329 .await;
1330 assert!(
1331 change_notification_2.text_document.version > change_notification_1.text_document.version
1332 );
1333
1334 // Handle out-of-order diagnostics
1335 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1336 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1337 version: Some(change_notification_2.text_document.version),
1338 diagnostics: vec![
1339 lsp::Diagnostic {
1340 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1341 severity: Some(DiagnosticSeverity::ERROR),
1342 message: "undefined variable 'BB'".to_string(),
1343 source: Some("disk".to_string()),
1344 ..Default::default()
1345 },
1346 lsp::Diagnostic {
1347 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1348 severity: Some(DiagnosticSeverity::WARNING),
1349 message: "undefined variable 'A'".to_string(),
1350 source: Some("disk".to_string()),
1351 ..Default::default()
1352 },
1353 ],
1354 });
1355
1356 buffer.next_notification(cx).await;
1357 buffer.read_with(cx, |buffer, _| {
1358 assert_eq!(
1359 buffer
1360 .snapshot()
1361 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1362 .collect::<Vec<_>>(),
1363 &[
1364 DiagnosticEntry {
1365 range: Point::new(2, 21)..Point::new(2, 22),
1366 diagnostic: Diagnostic {
1367 source: Some("disk".into()),
1368 severity: DiagnosticSeverity::WARNING,
1369 message: "undefined variable 'A'".to_string(),
1370 is_disk_based: true,
1371 group_id: 6,
1372 is_primary: true,
1373 ..Default::default()
1374 }
1375 },
1376 DiagnosticEntry {
1377 range: Point::new(3, 9)..Point::new(3, 14),
1378 diagnostic: Diagnostic {
1379 source: Some("disk".into()),
1380 severity: DiagnosticSeverity::ERROR,
1381 message: "undefined variable 'BB'".to_string(),
1382 is_disk_based: true,
1383 group_id: 5,
1384 is_primary: true,
1385 ..Default::default()
1386 },
1387 }
1388 ]
1389 );
1390 });
1391}
1392
1393#[gpui::test]
1394async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1395 init_test(cx);
1396
1397 let text = concat!(
1398 "let one = ;\n", //
1399 "let two = \n",
1400 "let three = 3;\n",
1401 );
1402
1403 let fs = FakeFs::new(cx.background());
1404 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1405
1406 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1407 let buffer = project
1408 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1409 .await
1410 .unwrap();
1411
1412 project.update(cx, |project, cx| {
1413 project
1414 .update_buffer_diagnostics(
1415 &buffer,
1416 LanguageServerId(0),
1417 None,
1418 vec![
1419 DiagnosticEntry {
1420 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1421 diagnostic: Diagnostic {
1422 severity: DiagnosticSeverity::ERROR,
1423 message: "syntax error 1".to_string(),
1424 ..Default::default()
1425 },
1426 },
1427 DiagnosticEntry {
1428 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1429 diagnostic: Diagnostic {
1430 severity: DiagnosticSeverity::ERROR,
1431 message: "syntax error 2".to_string(),
1432 ..Default::default()
1433 },
1434 },
1435 ],
1436 cx,
1437 )
1438 .unwrap();
1439 });
1440
1441 // An empty range is extended forward to include the following character.
1442 // At the end of a line, an empty range is extended backward to include
1443 // the preceding character.
1444 buffer.read_with(cx, |buffer, _| {
1445 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1446 assert_eq!(
1447 chunks
1448 .iter()
1449 .map(|(s, d)| (s.as_str(), *d))
1450 .collect::<Vec<_>>(),
1451 &[
1452 ("let one = ", None),
1453 (";", Some(DiagnosticSeverity::ERROR)),
1454 ("\nlet two =", None),
1455 (" ", Some(DiagnosticSeverity::ERROR)),
1456 ("\nlet three = 3;\n", None)
1457 ]
1458 );
1459 });
1460}
1461
1462#[gpui::test]
1463async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1464 init_test(cx);
1465
1466 let fs = FakeFs::new(cx.background());
1467 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1468 .await;
1469
1470 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1471
1472 project.update(cx, |project, cx| {
1473 project
1474 .update_diagnostic_entries(
1475 LanguageServerId(0),
1476 Path::new("/dir/a.rs").to_owned(),
1477 None,
1478 vec![DiagnosticEntry {
1479 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1480 diagnostic: Diagnostic {
1481 severity: DiagnosticSeverity::ERROR,
1482 is_primary: true,
1483 message: "syntax error a1".to_string(),
1484 ..Default::default()
1485 },
1486 }],
1487 cx,
1488 )
1489 .unwrap();
1490 project
1491 .update_diagnostic_entries(
1492 LanguageServerId(1),
1493 Path::new("/dir/a.rs").to_owned(),
1494 None,
1495 vec![DiagnosticEntry {
1496 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1497 diagnostic: Diagnostic {
1498 severity: DiagnosticSeverity::ERROR,
1499 is_primary: true,
1500 message: "syntax error b1".to_string(),
1501 ..Default::default()
1502 },
1503 }],
1504 cx,
1505 )
1506 .unwrap();
1507
1508 assert_eq!(
1509 project.diagnostic_summary(cx),
1510 DiagnosticSummary {
1511 error_count: 2,
1512 warning_count: 0,
1513 }
1514 );
1515 });
1516}
1517
1518#[gpui::test]
1519async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1520 init_test(cx);
1521
1522 let mut language = Language::new(
1523 LanguageConfig {
1524 name: "Rust".into(),
1525 path_suffixes: vec!["rs".to_string()],
1526 ..Default::default()
1527 },
1528 Some(tree_sitter_rust::language()),
1529 );
1530 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1531
1532 let text = "
1533 fn a() {
1534 f1();
1535 }
1536 fn b() {
1537 f2();
1538 }
1539 fn c() {
1540 f3();
1541 }
1542 "
1543 .unindent();
1544
1545 let fs = FakeFs::new(cx.background());
1546 fs.insert_tree(
1547 "/dir",
1548 json!({
1549 "a.rs": text.clone(),
1550 }),
1551 )
1552 .await;
1553
1554 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1555 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1556 let buffer = project
1557 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1558 .await
1559 .unwrap();
1560
1561 let mut fake_server = fake_servers.next().await.unwrap();
1562 let lsp_document_version = fake_server
1563 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1564 .await
1565 .text_document
1566 .version;
1567
1568 // Simulate editing the buffer after the language server computes some edits.
1569 buffer.update(cx, |buffer, cx| {
1570 buffer.edit(
1571 [(
1572 Point::new(0, 0)..Point::new(0, 0),
1573 "// above first function\n",
1574 )],
1575 None,
1576 cx,
1577 );
1578 buffer.edit(
1579 [(
1580 Point::new(2, 0)..Point::new(2, 0),
1581 " // inside first function\n",
1582 )],
1583 None,
1584 cx,
1585 );
1586 buffer.edit(
1587 [(
1588 Point::new(6, 4)..Point::new(6, 4),
1589 "// inside second function ",
1590 )],
1591 None,
1592 cx,
1593 );
1594
1595 assert_eq!(
1596 buffer.text(),
1597 "
1598 // above first function
1599 fn a() {
1600 // inside first function
1601 f1();
1602 }
1603 fn b() {
1604 // inside second function f2();
1605 }
1606 fn c() {
1607 f3();
1608 }
1609 "
1610 .unindent()
1611 );
1612 });
1613
1614 let edits = project
1615 .update(cx, |project, cx| {
1616 project.edits_from_lsp(
1617 &buffer,
1618 vec![
1619 // replace body of first function
1620 lsp::TextEdit {
1621 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1622 new_text: "
1623 fn a() {
1624 f10();
1625 }
1626 "
1627 .unindent(),
1628 },
1629 // edit inside second function
1630 lsp::TextEdit {
1631 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1632 new_text: "00".into(),
1633 },
1634 // edit inside third function via two distinct edits
1635 lsp::TextEdit {
1636 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1637 new_text: "4000".into(),
1638 },
1639 lsp::TextEdit {
1640 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1641 new_text: "".into(),
1642 },
1643 ],
1644 LanguageServerId(0),
1645 Some(lsp_document_version),
1646 cx,
1647 )
1648 })
1649 .await
1650 .unwrap();
1651
1652 buffer.update(cx, |buffer, cx| {
1653 for (range, new_text) in edits {
1654 buffer.edit([(range, new_text)], None, cx);
1655 }
1656 assert_eq!(
1657 buffer.text(),
1658 "
1659 // above first function
1660 fn a() {
1661 // inside first function
1662 f10();
1663 }
1664 fn b() {
1665 // inside second function f200();
1666 }
1667 fn c() {
1668 f4000();
1669 }
1670 "
1671 .unindent()
1672 );
1673 });
1674}
1675
1676#[gpui::test]
1677async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1678 init_test(cx);
1679
1680 let text = "
1681 use a::b;
1682 use a::c;
1683
1684 fn f() {
1685 b();
1686 c();
1687 }
1688 "
1689 .unindent();
1690
1691 let fs = FakeFs::new(cx.background());
1692 fs.insert_tree(
1693 "/dir",
1694 json!({
1695 "a.rs": text.clone(),
1696 }),
1697 )
1698 .await;
1699
1700 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1701 let buffer = project
1702 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1703 .await
1704 .unwrap();
1705
1706 // Simulate the language server sending us a small edit in the form of a very large diff.
1707 // Rust-analyzer does this when performing a merge-imports code action.
1708 let edits = project
1709 .update(cx, |project, cx| {
1710 project.edits_from_lsp(
1711 &buffer,
1712 [
1713 // Replace the first use statement without editing the semicolon.
1714 lsp::TextEdit {
1715 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1716 new_text: "a::{b, c}".into(),
1717 },
1718 // Reinsert the remainder of the file between the semicolon and the final
1719 // newline of the file.
1720 lsp::TextEdit {
1721 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1722 new_text: "\n\n".into(),
1723 },
1724 lsp::TextEdit {
1725 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1726 new_text: "
1727 fn f() {
1728 b();
1729 c();
1730 }"
1731 .unindent(),
1732 },
1733 // Delete everything after the first newline of the file.
1734 lsp::TextEdit {
1735 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1736 new_text: "".into(),
1737 },
1738 ],
1739 LanguageServerId(0),
1740 None,
1741 cx,
1742 )
1743 })
1744 .await
1745 .unwrap();
1746
1747 buffer.update(cx, |buffer, cx| {
1748 let edits = edits
1749 .into_iter()
1750 .map(|(range, text)| {
1751 (
1752 range.start.to_point(buffer)..range.end.to_point(buffer),
1753 text,
1754 )
1755 })
1756 .collect::<Vec<_>>();
1757
1758 assert_eq!(
1759 edits,
1760 [
1761 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1762 (Point::new(1, 0)..Point::new(2, 0), "".into())
1763 ]
1764 );
1765
1766 for (range, new_text) in edits {
1767 buffer.edit([(range, new_text)], None, cx);
1768 }
1769 assert_eq!(
1770 buffer.text(),
1771 "
1772 use a::{b, c};
1773
1774 fn f() {
1775 b();
1776 c();
1777 }
1778 "
1779 .unindent()
1780 );
1781 });
1782}
1783
1784#[gpui::test]
1785async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1786 init_test(cx);
1787
1788 let text = "
1789 use a::b;
1790 use a::c;
1791
1792 fn f() {
1793 b();
1794 c();
1795 }
1796 "
1797 .unindent();
1798
1799 let fs = FakeFs::new(cx.background());
1800 fs.insert_tree(
1801 "/dir",
1802 json!({
1803 "a.rs": text.clone(),
1804 }),
1805 )
1806 .await;
1807
1808 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1809 let buffer = project
1810 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1811 .await
1812 .unwrap();
1813
1814 // Simulate the language server sending us edits in a non-ordered fashion,
1815 // with ranges sometimes being inverted or pointing to invalid locations.
1816 let edits = project
1817 .update(cx, |project, cx| {
1818 project.edits_from_lsp(
1819 &buffer,
1820 [
1821 lsp::TextEdit {
1822 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1823 new_text: "\n\n".into(),
1824 },
1825 lsp::TextEdit {
1826 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1827 new_text: "a::{b, c}".into(),
1828 },
1829 lsp::TextEdit {
1830 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1831 new_text: "".into(),
1832 },
1833 lsp::TextEdit {
1834 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1835 new_text: "
1836 fn f() {
1837 b();
1838 c();
1839 }"
1840 .unindent(),
1841 },
1842 ],
1843 LanguageServerId(0),
1844 None,
1845 cx,
1846 )
1847 })
1848 .await
1849 .unwrap();
1850
1851 buffer.update(cx, |buffer, cx| {
1852 let edits = edits
1853 .into_iter()
1854 .map(|(range, text)| {
1855 (
1856 range.start.to_point(buffer)..range.end.to_point(buffer),
1857 text,
1858 )
1859 })
1860 .collect::<Vec<_>>();
1861
1862 assert_eq!(
1863 edits,
1864 [
1865 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1866 (Point::new(1, 0)..Point::new(2, 0), "".into())
1867 ]
1868 );
1869
1870 for (range, new_text) in edits {
1871 buffer.edit([(range, new_text)], None, cx);
1872 }
1873 assert_eq!(
1874 buffer.text(),
1875 "
1876 use a::{b, c};
1877
1878 fn f() {
1879 b();
1880 c();
1881 }
1882 "
1883 .unindent()
1884 );
1885 });
1886}
1887
1888fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1889 buffer: &Buffer,
1890 range: Range<T>,
1891) -> Vec<(String, Option<DiagnosticSeverity>)> {
1892 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1893 for chunk in buffer.snapshot().chunks(range, true) {
1894 if chunks.last().map_or(false, |prev_chunk| {
1895 prev_chunk.1 == chunk.diagnostic_severity
1896 }) {
1897 chunks.last_mut().unwrap().0.push_str(chunk.text);
1898 } else {
1899 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1900 }
1901 }
1902 chunks
1903}
1904
1905#[gpui::test(iterations = 10)]
1906async fn test_definition(cx: &mut gpui::TestAppContext) {
1907 init_test(cx);
1908
1909 let mut language = Language::new(
1910 LanguageConfig {
1911 name: "Rust".into(),
1912 path_suffixes: vec!["rs".to_string()],
1913 ..Default::default()
1914 },
1915 Some(tree_sitter_rust::language()),
1916 );
1917 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1918
1919 let fs = FakeFs::new(cx.background());
1920 fs.insert_tree(
1921 "/dir",
1922 json!({
1923 "a.rs": "const fn a() { A }",
1924 "b.rs": "const y: i32 = crate::a()",
1925 }),
1926 )
1927 .await;
1928
1929 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1930 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1931
1932 let buffer = project
1933 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1934 .await
1935 .unwrap();
1936
1937 let fake_server = fake_servers.next().await.unwrap();
1938 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1939 let params = params.text_document_position_params;
1940 assert_eq!(
1941 params.text_document.uri.to_file_path().unwrap(),
1942 Path::new("/dir/b.rs"),
1943 );
1944 assert_eq!(params.position, lsp::Position::new(0, 22));
1945
1946 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1947 lsp::Location::new(
1948 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1949 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1950 ),
1951 )))
1952 });
1953
1954 let mut definitions = project
1955 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1956 .await
1957 .unwrap();
1958
1959 // Assert no new language server started
1960 cx.foreground().run_until_parked();
1961 assert!(fake_servers.try_next().is_err());
1962
1963 assert_eq!(definitions.len(), 1);
1964 let definition = definitions.pop().unwrap();
1965 cx.update(|cx| {
1966 let target_buffer = definition.target.buffer.read(cx);
1967 assert_eq!(
1968 target_buffer
1969 .file()
1970 .unwrap()
1971 .as_local()
1972 .unwrap()
1973 .abs_path(cx),
1974 Path::new("/dir/a.rs"),
1975 );
1976 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1977 assert_eq!(
1978 list_worktrees(&project, cx),
1979 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1980 );
1981
1982 drop(definition);
1983 });
1984 cx.read(|cx| {
1985 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1986 });
1987
1988 fn list_worktrees<'a>(
1989 project: &'a ModelHandle<Project>,
1990 cx: &'a AppContext,
1991 ) -> Vec<(&'a Path, bool)> {
1992 project
1993 .read(cx)
1994 .worktrees(cx)
1995 .map(|worktree| {
1996 let worktree = worktree.read(cx);
1997 (
1998 worktree.as_local().unwrap().abs_path().as_ref(),
1999 worktree.is_visible(),
2000 )
2001 })
2002 .collect::<Vec<_>>()
2003 }
2004}
2005
2006#[gpui::test]
2007async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2008 init_test(cx);
2009
2010 let mut language = Language::new(
2011 LanguageConfig {
2012 name: "TypeScript".into(),
2013 path_suffixes: vec!["ts".to_string()],
2014 ..Default::default()
2015 },
2016 Some(tree_sitter_typescript::language_typescript()),
2017 );
2018 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2019
2020 let fs = FakeFs::new(cx.background());
2021 fs.insert_tree(
2022 "/dir",
2023 json!({
2024 "a.ts": "",
2025 }),
2026 )
2027 .await;
2028
2029 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2030 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2031 let buffer = project
2032 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2033 .await
2034 .unwrap();
2035
2036 let fake_server = fake_language_servers.next().await.unwrap();
2037
2038 let text = "let a = b.fqn";
2039 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2040 let completions = project.update(cx, |project, cx| {
2041 project.completions(&buffer, text.len(), cx)
2042 });
2043
2044 fake_server
2045 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2046 Ok(Some(lsp::CompletionResponse::Array(vec![
2047 lsp::CompletionItem {
2048 label: "fullyQualifiedName?".into(),
2049 insert_text: Some("fullyQualifiedName".into()),
2050 ..Default::default()
2051 },
2052 ])))
2053 })
2054 .next()
2055 .await;
2056 let completions = completions.await.unwrap();
2057 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2058 assert_eq!(completions.len(), 1);
2059 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2060 assert_eq!(
2061 completions[0].old_range.to_offset(&snapshot),
2062 text.len() - 3..text.len()
2063 );
2064
2065 let text = "let a = \"atoms/cmp\"";
2066 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2067 let completions = project.update(cx, |project, cx| {
2068 project.completions(&buffer, text.len() - 1, cx)
2069 });
2070
2071 fake_server
2072 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2073 Ok(Some(lsp::CompletionResponse::Array(vec![
2074 lsp::CompletionItem {
2075 label: "component".into(),
2076 ..Default::default()
2077 },
2078 ])))
2079 })
2080 .next()
2081 .await;
2082 let completions = completions.await.unwrap();
2083 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2084 assert_eq!(completions.len(), 1);
2085 assert_eq!(completions[0].new_text, "component");
2086 assert_eq!(
2087 completions[0].old_range.to_offset(&snapshot),
2088 text.len() - 4..text.len() - 1
2089 );
2090}
2091
2092#[gpui::test]
2093async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2094 init_test(cx);
2095
2096 let mut language = Language::new(
2097 LanguageConfig {
2098 name: "TypeScript".into(),
2099 path_suffixes: vec!["ts".to_string()],
2100 ..Default::default()
2101 },
2102 Some(tree_sitter_typescript::language_typescript()),
2103 );
2104 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2105
2106 let fs = FakeFs::new(cx.background());
2107 fs.insert_tree(
2108 "/dir",
2109 json!({
2110 "a.ts": "",
2111 }),
2112 )
2113 .await;
2114
2115 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2116 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2117 let buffer = project
2118 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2119 .await
2120 .unwrap();
2121
2122 let fake_server = fake_language_servers.next().await.unwrap();
2123
2124 let text = "let a = b.fqn";
2125 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2126 let completions = project.update(cx, |project, cx| {
2127 project.completions(&buffer, text.len(), cx)
2128 });
2129
2130 fake_server
2131 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2132 Ok(Some(lsp::CompletionResponse::Array(vec![
2133 lsp::CompletionItem {
2134 label: "fullyQualifiedName?".into(),
2135 insert_text: Some("fully\rQualified\r\nName".into()),
2136 ..Default::default()
2137 },
2138 ])))
2139 })
2140 .next()
2141 .await;
2142 let completions = completions.await.unwrap();
2143 assert_eq!(completions.len(), 1);
2144 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2145}
2146
2147#[gpui::test(iterations = 10)]
2148async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2149 init_test(cx);
2150
2151 let mut language = Language::new(
2152 LanguageConfig {
2153 name: "TypeScript".into(),
2154 path_suffixes: vec!["ts".to_string()],
2155 ..Default::default()
2156 },
2157 None,
2158 );
2159 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2160
2161 let fs = FakeFs::new(cx.background());
2162 fs.insert_tree(
2163 "/dir",
2164 json!({
2165 "a.ts": "a",
2166 }),
2167 )
2168 .await;
2169
2170 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2171 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2172 let buffer = project
2173 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2174 .await
2175 .unwrap();
2176
2177 let fake_server = fake_language_servers.next().await.unwrap();
2178
2179 // Language server returns code actions that contain commands, and not edits.
2180 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2181 fake_server
2182 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2183 Ok(Some(vec![
2184 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2185 title: "The code action".into(),
2186 command: Some(lsp::Command {
2187 title: "The command".into(),
2188 command: "_the/command".into(),
2189 arguments: Some(vec![json!("the-argument")]),
2190 }),
2191 ..Default::default()
2192 }),
2193 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2194 title: "two".into(),
2195 ..Default::default()
2196 }),
2197 ]))
2198 })
2199 .next()
2200 .await;
2201
2202 let action = actions.await.unwrap()[0].clone();
2203 let apply = project.update(cx, |project, cx| {
2204 project.apply_code_action(buffer.clone(), action, true, cx)
2205 });
2206
2207 // Resolving the code action does not populate its edits. In absence of
2208 // edits, we must execute the given command.
2209 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2210 |action, _| async move { Ok(action) },
2211 );
2212
2213 // While executing the command, the language server sends the editor
2214 // a `workspaceEdit` request.
2215 fake_server
2216 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2217 let fake = fake_server.clone();
2218 move |params, _| {
2219 assert_eq!(params.command, "_the/command");
2220 let fake = fake.clone();
2221 async move {
2222 fake.server
2223 .request::<lsp::request::ApplyWorkspaceEdit>(
2224 lsp::ApplyWorkspaceEditParams {
2225 label: None,
2226 edit: lsp::WorkspaceEdit {
2227 changes: Some(
2228 [(
2229 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2230 vec![lsp::TextEdit {
2231 range: lsp::Range::new(
2232 lsp::Position::new(0, 0),
2233 lsp::Position::new(0, 0),
2234 ),
2235 new_text: "X".into(),
2236 }],
2237 )]
2238 .into_iter()
2239 .collect(),
2240 ),
2241 ..Default::default()
2242 },
2243 },
2244 )
2245 .await
2246 .unwrap();
2247 Ok(Some(json!(null)))
2248 }
2249 }
2250 })
2251 .next()
2252 .await;
2253
2254 // Applying the code action returns a project transaction containing the edits
2255 // sent by the language server in its `workspaceEdit` request.
2256 let transaction = apply.await.unwrap();
2257 assert!(transaction.0.contains_key(&buffer));
2258 buffer.update(cx, |buffer, cx| {
2259 assert_eq!(buffer.text(), "Xa");
2260 buffer.undo(cx);
2261 assert_eq!(buffer.text(), "a");
2262 });
2263}
2264
2265#[gpui::test(iterations = 10)]
2266async fn test_save_file(cx: &mut gpui::TestAppContext) {
2267 init_test(cx);
2268
2269 let fs = FakeFs::new(cx.background());
2270 fs.insert_tree(
2271 "/dir",
2272 json!({
2273 "file1": "the old contents",
2274 }),
2275 )
2276 .await;
2277
2278 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2279 let buffer = project
2280 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2281 .await
2282 .unwrap();
2283 buffer.update(cx, |buffer, cx| {
2284 assert_eq!(buffer.text(), "the old contents");
2285 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2286 });
2287
2288 project
2289 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2290 .await
2291 .unwrap();
2292
2293 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2294 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2295}
2296
2297#[gpui::test]
2298async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2299 init_test(cx);
2300
2301 let fs = FakeFs::new(cx.background());
2302 fs.insert_tree(
2303 "/dir",
2304 json!({
2305 "file1": "the old contents",
2306 }),
2307 )
2308 .await;
2309
2310 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2311 let buffer = project
2312 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2313 .await
2314 .unwrap();
2315 buffer.update(cx, |buffer, cx| {
2316 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2317 });
2318
2319 project
2320 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2321 .await
2322 .unwrap();
2323
2324 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2325 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2326}
2327
2328#[gpui::test]
2329async fn test_save_as(cx: &mut gpui::TestAppContext) {
2330 init_test(cx);
2331
2332 let fs = FakeFs::new(cx.background());
2333 fs.insert_tree("/dir", json!({})).await;
2334
2335 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2336
2337 let languages = project.read_with(cx, |project, _| project.languages().clone());
2338 languages.register(
2339 "/some/path",
2340 LanguageConfig {
2341 name: "Rust".into(),
2342 path_suffixes: vec!["rs".into()],
2343 ..Default::default()
2344 },
2345 tree_sitter_rust::language(),
2346 vec![],
2347 |_| Default::default(),
2348 );
2349
2350 let buffer = project.update(cx, |project, cx| {
2351 project.create_buffer("", None, cx).unwrap()
2352 });
2353 buffer.update(cx, |buffer, cx| {
2354 buffer.edit([(0..0, "abc")], None, cx);
2355 assert!(buffer.is_dirty());
2356 assert!(!buffer.has_conflict());
2357 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2358 });
2359 project
2360 .update(cx, |project, cx| {
2361 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2362 })
2363 .await
2364 .unwrap();
2365 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2366
2367 cx.foreground().run_until_parked();
2368 buffer.read_with(cx, |buffer, cx| {
2369 assert_eq!(
2370 buffer.file().unwrap().full_path(cx),
2371 Path::new("dir/file1.rs")
2372 );
2373 assert!(!buffer.is_dirty());
2374 assert!(!buffer.has_conflict());
2375 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2376 });
2377
2378 let opened_buffer = project
2379 .update(cx, |project, cx| {
2380 project.open_local_buffer("/dir/file1.rs", cx)
2381 })
2382 .await
2383 .unwrap();
2384 assert_eq!(opened_buffer, buffer);
2385}
2386
2387#[gpui::test(retries = 5)]
2388async fn test_rescan_and_remote_updates(
2389 deterministic: Arc<Deterministic>,
2390 cx: &mut gpui::TestAppContext,
2391) {
2392 init_test(cx);
2393 cx.foreground().allow_parking();
2394
2395 let dir = temp_tree(json!({
2396 "a": {
2397 "file1": "",
2398 "file2": "",
2399 "file3": "",
2400 },
2401 "b": {
2402 "c": {
2403 "file4": "",
2404 "file5": "",
2405 }
2406 }
2407 }));
2408
2409 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2410 let rpc = project.read_with(cx, |p, _| p.client.clone());
2411
2412 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2413 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2414 async move { buffer.await.unwrap() }
2415 };
2416 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2417 project.read_with(cx, |project, cx| {
2418 let tree = project.worktrees(cx).next().unwrap();
2419 tree.read(cx)
2420 .entry_for_path(path)
2421 .unwrap_or_else(|| panic!("no entry for path {}", path))
2422 .id
2423 })
2424 };
2425
2426 let buffer2 = buffer_for_path("a/file2", cx).await;
2427 let buffer3 = buffer_for_path("a/file3", cx).await;
2428 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2429 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2430
2431 let file2_id = id_for_path("a/file2", cx);
2432 let file3_id = id_for_path("a/file3", cx);
2433 let file4_id = id_for_path("b/c/file4", cx);
2434
2435 // Create a remote copy of this worktree.
2436 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2437 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2438 let remote = cx.update(|cx| {
2439 Worktree::remote(
2440 1,
2441 1,
2442 proto::WorktreeMetadata {
2443 id: initial_snapshot.id().to_proto(),
2444 root_name: initial_snapshot.root_name().into(),
2445 abs_path: initial_snapshot
2446 .abs_path()
2447 .as_os_str()
2448 .to_string_lossy()
2449 .into(),
2450 visible: true,
2451 },
2452 rpc.clone(),
2453 cx,
2454 )
2455 });
2456 remote.update(cx, |remote, _| {
2457 let update = initial_snapshot.build_initial_update(1);
2458 remote.as_remote_mut().unwrap().update_from_remote(update);
2459 });
2460 deterministic.run_until_parked();
2461
2462 cx.read(|cx| {
2463 assert!(!buffer2.read(cx).is_dirty());
2464 assert!(!buffer3.read(cx).is_dirty());
2465 assert!(!buffer4.read(cx).is_dirty());
2466 assert!(!buffer5.read(cx).is_dirty());
2467 });
2468
2469 // Rename and delete files and directories.
2470 tree.flush_fs_events(cx).await;
2471 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2472 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2473 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2474 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2475 tree.flush_fs_events(cx).await;
2476
2477 let expected_paths = vec![
2478 "a",
2479 "a/file1",
2480 "a/file2.new",
2481 "b",
2482 "d",
2483 "d/file3",
2484 "d/file4",
2485 ];
2486
2487 cx.read(|app| {
2488 assert_eq!(
2489 tree.read(app)
2490 .paths()
2491 .map(|p| p.to_str().unwrap())
2492 .collect::<Vec<_>>(),
2493 expected_paths
2494 );
2495
2496 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2497 assert_eq!(id_for_path("d/file3", cx), file3_id);
2498 assert_eq!(id_for_path("d/file4", cx), file4_id);
2499
2500 assert_eq!(
2501 buffer2.read(app).file().unwrap().path().as_ref(),
2502 Path::new("a/file2.new")
2503 );
2504 assert_eq!(
2505 buffer3.read(app).file().unwrap().path().as_ref(),
2506 Path::new("d/file3")
2507 );
2508 assert_eq!(
2509 buffer4.read(app).file().unwrap().path().as_ref(),
2510 Path::new("d/file4")
2511 );
2512 assert_eq!(
2513 buffer5.read(app).file().unwrap().path().as_ref(),
2514 Path::new("b/c/file5")
2515 );
2516
2517 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2518 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2519 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2520 assert!(buffer5.read(app).file().unwrap().is_deleted());
2521 });
2522
2523 // Update the remote worktree. Check that it becomes consistent with the
2524 // local worktree.
2525 remote.update(cx, |remote, cx| {
2526 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2527 &initial_snapshot,
2528 1,
2529 1,
2530 true,
2531 );
2532 remote.as_remote_mut().unwrap().update_from_remote(update);
2533 });
2534 deterministic.run_until_parked();
2535 remote.read_with(cx, |remote, _| {
2536 assert_eq!(
2537 remote
2538 .paths()
2539 .map(|p| p.to_str().unwrap())
2540 .collect::<Vec<_>>(),
2541 expected_paths
2542 );
2543 });
2544}
2545
2546#[gpui::test(iterations = 10)]
2547async fn test_buffer_identity_across_renames(
2548 deterministic: Arc<Deterministic>,
2549 cx: &mut gpui::TestAppContext,
2550) {
2551 init_test(cx);
2552
2553 let fs = FakeFs::new(cx.background());
2554 fs.insert_tree(
2555 "/dir",
2556 json!({
2557 "a": {
2558 "file1": "",
2559 }
2560 }),
2561 )
2562 .await;
2563
2564 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2565 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2566 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2567
2568 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2569 project.read_with(cx, |project, cx| {
2570 let tree = project.worktrees(cx).next().unwrap();
2571 tree.read(cx)
2572 .entry_for_path(path)
2573 .unwrap_or_else(|| panic!("no entry for path {}", path))
2574 .id
2575 })
2576 };
2577
2578 let dir_id = id_for_path("a", cx);
2579 let file_id = id_for_path("a/file1", cx);
2580 let buffer = project
2581 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2582 .await
2583 .unwrap();
2584 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2585
2586 project
2587 .update(cx, |project, cx| {
2588 project.rename_entry(dir_id, Path::new("b"), cx)
2589 })
2590 .unwrap()
2591 .await
2592 .unwrap();
2593 deterministic.run_until_parked();
2594 assert_eq!(id_for_path("b", cx), dir_id);
2595 assert_eq!(id_for_path("b/file1", cx), file_id);
2596 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2597}
2598
2599#[gpui::test]
2600async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2601 init_test(cx);
2602
2603 let fs = FakeFs::new(cx.background());
2604 fs.insert_tree(
2605 "/dir",
2606 json!({
2607 "a.txt": "a-contents",
2608 "b.txt": "b-contents",
2609 }),
2610 )
2611 .await;
2612
2613 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2614
2615 // Spawn multiple tasks to open paths, repeating some paths.
2616 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2617 (
2618 p.open_local_buffer("/dir/a.txt", cx),
2619 p.open_local_buffer("/dir/b.txt", cx),
2620 p.open_local_buffer("/dir/a.txt", cx),
2621 )
2622 });
2623
2624 let buffer_a_1 = buffer_a_1.await.unwrap();
2625 let buffer_a_2 = buffer_a_2.await.unwrap();
2626 let buffer_b = buffer_b.await.unwrap();
2627 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2628 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2629
2630 // There is only one buffer per path.
2631 let buffer_a_id = buffer_a_1.id();
2632 assert_eq!(buffer_a_2.id(), buffer_a_id);
2633
2634 // Open the same path again while it is still open.
2635 drop(buffer_a_1);
2636 let buffer_a_3 = project
2637 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2638 .await
2639 .unwrap();
2640
2641 // There's still only one buffer per path.
2642 assert_eq!(buffer_a_3.id(), buffer_a_id);
2643}
2644
2645#[gpui::test]
2646async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2647 init_test(cx);
2648
2649 let fs = FakeFs::new(cx.background());
2650 fs.insert_tree(
2651 "/dir",
2652 json!({
2653 "file1": "abc",
2654 "file2": "def",
2655 "file3": "ghi",
2656 }),
2657 )
2658 .await;
2659
2660 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2661
2662 let buffer1 = project
2663 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2664 .await
2665 .unwrap();
2666 let events = Rc::new(RefCell::new(Vec::new()));
2667
2668 // initially, the buffer isn't dirty.
2669 buffer1.update(cx, |buffer, cx| {
2670 cx.subscribe(&buffer1, {
2671 let events = events.clone();
2672 move |_, _, event, _| match event {
2673 BufferEvent::Operation(_) => {}
2674 _ => events.borrow_mut().push(event.clone()),
2675 }
2676 })
2677 .detach();
2678
2679 assert!(!buffer.is_dirty());
2680 assert!(events.borrow().is_empty());
2681
2682 buffer.edit([(1..2, "")], None, cx);
2683 });
2684
2685 // after the first edit, the buffer is dirty, and emits a dirtied event.
2686 buffer1.update(cx, |buffer, cx| {
2687 assert!(buffer.text() == "ac");
2688 assert!(buffer.is_dirty());
2689 assert_eq!(
2690 *events.borrow(),
2691 &[language::Event::Edited, language::Event::DirtyChanged]
2692 );
2693 events.borrow_mut().clear();
2694 buffer.did_save(
2695 buffer.version(),
2696 buffer.as_rope().fingerprint(),
2697 buffer.file().unwrap().mtime(),
2698 cx,
2699 );
2700 });
2701
2702 // after saving, the buffer is not dirty, and emits a saved event.
2703 buffer1.update(cx, |buffer, cx| {
2704 assert!(!buffer.is_dirty());
2705 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2706 events.borrow_mut().clear();
2707
2708 buffer.edit([(1..1, "B")], None, cx);
2709 buffer.edit([(2..2, "D")], None, cx);
2710 });
2711
2712 // after editing again, the buffer is dirty, and emits another dirty event.
2713 buffer1.update(cx, |buffer, cx| {
2714 assert!(buffer.text() == "aBDc");
2715 assert!(buffer.is_dirty());
2716 assert_eq!(
2717 *events.borrow(),
2718 &[
2719 language::Event::Edited,
2720 language::Event::DirtyChanged,
2721 language::Event::Edited,
2722 ],
2723 );
2724 events.borrow_mut().clear();
2725
2726 // After restoring the buffer to its previously-saved state,
2727 // the buffer is not considered dirty anymore.
2728 buffer.edit([(1..3, "")], None, cx);
2729 assert!(buffer.text() == "ac");
2730 assert!(!buffer.is_dirty());
2731 });
2732
2733 assert_eq!(
2734 *events.borrow(),
2735 &[language::Event::Edited, language::Event::DirtyChanged]
2736 );
2737
2738 // When a file is deleted, the buffer is considered dirty.
2739 let events = Rc::new(RefCell::new(Vec::new()));
2740 let buffer2 = project
2741 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2742 .await
2743 .unwrap();
2744 buffer2.update(cx, |_, cx| {
2745 cx.subscribe(&buffer2, {
2746 let events = events.clone();
2747 move |_, _, event, _| events.borrow_mut().push(event.clone())
2748 })
2749 .detach();
2750 });
2751
2752 fs.remove_file("/dir/file2".as_ref(), Default::default())
2753 .await
2754 .unwrap();
2755 cx.foreground().run_until_parked();
2756 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2757 assert_eq!(
2758 *events.borrow(),
2759 &[
2760 language::Event::DirtyChanged,
2761 language::Event::FileHandleChanged
2762 ]
2763 );
2764
2765 // When a file is already dirty when deleted, we don't emit a Dirtied event.
2766 let events = Rc::new(RefCell::new(Vec::new()));
2767 let buffer3 = project
2768 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2769 .await
2770 .unwrap();
2771 buffer3.update(cx, |_, cx| {
2772 cx.subscribe(&buffer3, {
2773 let events = events.clone();
2774 move |_, _, event, _| events.borrow_mut().push(event.clone())
2775 })
2776 .detach();
2777 });
2778
2779 buffer3.update(cx, |buffer, cx| {
2780 buffer.edit([(0..0, "x")], None, cx);
2781 });
2782 events.borrow_mut().clear();
2783 fs.remove_file("/dir/file3".as_ref(), Default::default())
2784 .await
2785 .unwrap();
2786 cx.foreground().run_until_parked();
2787 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2788 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2789}
2790
2791#[gpui::test]
2792async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2793 init_test(cx);
2794
2795 let initial_contents = "aaa\nbbbbb\nc\n";
2796 let fs = FakeFs::new(cx.background());
2797 fs.insert_tree(
2798 "/dir",
2799 json!({
2800 "the-file": initial_contents,
2801 }),
2802 )
2803 .await;
2804 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2805 let buffer = project
2806 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2807 .await
2808 .unwrap();
2809
2810 let anchors = (0..3)
2811 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2812 .collect::<Vec<_>>();
2813
2814 // Change the file on disk, adding two new lines of text, and removing
2815 // one line.
2816 buffer.read_with(cx, |buffer, _| {
2817 assert!(!buffer.is_dirty());
2818 assert!(!buffer.has_conflict());
2819 });
2820 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2821 fs.save(
2822 "/dir/the-file".as_ref(),
2823 &new_contents.into(),
2824 LineEnding::Unix,
2825 )
2826 .await
2827 .unwrap();
2828
2829 // Because the buffer was not modified, it is reloaded from disk. Its
2830 // contents are edited according to the diff between the old and new
2831 // file contents.
2832 cx.foreground().run_until_parked();
2833 buffer.update(cx, |buffer, _| {
2834 assert_eq!(buffer.text(), new_contents);
2835 assert!(!buffer.is_dirty());
2836 assert!(!buffer.has_conflict());
2837
2838 let anchor_positions = anchors
2839 .iter()
2840 .map(|anchor| anchor.to_point(&*buffer))
2841 .collect::<Vec<_>>();
2842 assert_eq!(
2843 anchor_positions,
2844 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2845 );
2846 });
2847
2848 // Modify the buffer
2849 buffer.update(cx, |buffer, cx| {
2850 buffer.edit([(0..0, " ")], None, cx);
2851 assert!(buffer.is_dirty());
2852 assert!(!buffer.has_conflict());
2853 });
2854
2855 // Change the file on disk again, adding blank lines to the beginning.
2856 fs.save(
2857 "/dir/the-file".as_ref(),
2858 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2859 LineEnding::Unix,
2860 )
2861 .await
2862 .unwrap();
2863
2864 // Because the buffer is modified, it doesn't reload from disk, but is
2865 // marked as having a conflict.
2866 cx.foreground().run_until_parked();
2867 buffer.read_with(cx, |buffer, _| {
2868 assert!(buffer.has_conflict());
2869 });
2870}
2871
2872#[gpui::test]
2873async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2874 init_test(cx);
2875
2876 let fs = FakeFs::new(cx.background());
2877 fs.insert_tree(
2878 "/dir",
2879 json!({
2880 "file1": "a\nb\nc\n",
2881 "file2": "one\r\ntwo\r\nthree\r\n",
2882 }),
2883 )
2884 .await;
2885
2886 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2887 let buffer1 = project
2888 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2889 .await
2890 .unwrap();
2891 let buffer2 = project
2892 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2893 .await
2894 .unwrap();
2895
2896 buffer1.read_with(cx, |buffer, _| {
2897 assert_eq!(buffer.text(), "a\nb\nc\n");
2898 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2899 });
2900 buffer2.read_with(cx, |buffer, _| {
2901 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2902 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2903 });
2904
2905 // Change a file's line endings on disk from unix to windows. The buffer's
2906 // state updates correctly.
2907 fs.save(
2908 "/dir/file1".as_ref(),
2909 &"aaa\nb\nc\n".into(),
2910 LineEnding::Windows,
2911 )
2912 .await
2913 .unwrap();
2914 cx.foreground().run_until_parked();
2915 buffer1.read_with(cx, |buffer, _| {
2916 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2917 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2918 });
2919
2920 // Save a file with windows line endings. The file is written correctly.
2921 buffer2.update(cx, |buffer, cx| {
2922 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2923 });
2924 project
2925 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
2926 .await
2927 .unwrap();
2928 assert_eq!(
2929 fs.load("/dir/file2".as_ref()).await.unwrap(),
2930 "one\r\ntwo\r\nthree\r\nfour\r\n",
2931 );
2932}
2933
2934#[gpui::test]
2935async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2936 init_test(cx);
2937
2938 let fs = FakeFs::new(cx.background());
2939 fs.insert_tree(
2940 "/the-dir",
2941 json!({
2942 "a.rs": "
2943 fn foo(mut v: Vec<usize>) {
2944 for x in &v {
2945 v.push(1);
2946 }
2947 }
2948 "
2949 .unindent(),
2950 }),
2951 )
2952 .await;
2953
2954 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2955 let buffer = project
2956 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2957 .await
2958 .unwrap();
2959
2960 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2961 let message = lsp::PublishDiagnosticsParams {
2962 uri: buffer_uri.clone(),
2963 diagnostics: vec![
2964 lsp::Diagnostic {
2965 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2966 severity: Some(DiagnosticSeverity::WARNING),
2967 message: "error 1".to_string(),
2968 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2969 location: lsp::Location {
2970 uri: buffer_uri.clone(),
2971 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2972 },
2973 message: "error 1 hint 1".to_string(),
2974 }]),
2975 ..Default::default()
2976 },
2977 lsp::Diagnostic {
2978 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2979 severity: Some(DiagnosticSeverity::HINT),
2980 message: "error 1 hint 1".to_string(),
2981 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2982 location: lsp::Location {
2983 uri: buffer_uri.clone(),
2984 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2985 },
2986 message: "original diagnostic".to_string(),
2987 }]),
2988 ..Default::default()
2989 },
2990 lsp::Diagnostic {
2991 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2992 severity: Some(DiagnosticSeverity::ERROR),
2993 message: "error 2".to_string(),
2994 related_information: Some(vec![
2995 lsp::DiagnosticRelatedInformation {
2996 location: lsp::Location {
2997 uri: buffer_uri.clone(),
2998 range: lsp::Range::new(
2999 lsp::Position::new(1, 13),
3000 lsp::Position::new(1, 15),
3001 ),
3002 },
3003 message: "error 2 hint 1".to_string(),
3004 },
3005 lsp::DiagnosticRelatedInformation {
3006 location: lsp::Location {
3007 uri: buffer_uri.clone(),
3008 range: lsp::Range::new(
3009 lsp::Position::new(1, 13),
3010 lsp::Position::new(1, 15),
3011 ),
3012 },
3013 message: "error 2 hint 2".to_string(),
3014 },
3015 ]),
3016 ..Default::default()
3017 },
3018 lsp::Diagnostic {
3019 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3020 severity: Some(DiagnosticSeverity::HINT),
3021 message: "error 2 hint 1".to_string(),
3022 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3023 location: lsp::Location {
3024 uri: buffer_uri.clone(),
3025 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3026 },
3027 message: "original diagnostic".to_string(),
3028 }]),
3029 ..Default::default()
3030 },
3031 lsp::Diagnostic {
3032 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3033 severity: Some(DiagnosticSeverity::HINT),
3034 message: "error 2 hint 2".to_string(),
3035 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3036 location: lsp::Location {
3037 uri: buffer_uri,
3038 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3039 },
3040 message: "original diagnostic".to_string(),
3041 }]),
3042 ..Default::default()
3043 },
3044 ],
3045 version: None,
3046 };
3047
3048 project
3049 .update(cx, |p, cx| {
3050 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3051 })
3052 .unwrap();
3053 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3054
3055 assert_eq!(
3056 buffer
3057 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3058 .collect::<Vec<_>>(),
3059 &[
3060 DiagnosticEntry {
3061 range: Point::new(1, 8)..Point::new(1, 9),
3062 diagnostic: Diagnostic {
3063 severity: DiagnosticSeverity::WARNING,
3064 message: "error 1".to_string(),
3065 group_id: 1,
3066 is_primary: true,
3067 ..Default::default()
3068 }
3069 },
3070 DiagnosticEntry {
3071 range: Point::new(1, 8)..Point::new(1, 9),
3072 diagnostic: Diagnostic {
3073 severity: DiagnosticSeverity::HINT,
3074 message: "error 1 hint 1".to_string(),
3075 group_id: 1,
3076 is_primary: false,
3077 ..Default::default()
3078 }
3079 },
3080 DiagnosticEntry {
3081 range: Point::new(1, 13)..Point::new(1, 15),
3082 diagnostic: Diagnostic {
3083 severity: DiagnosticSeverity::HINT,
3084 message: "error 2 hint 1".to_string(),
3085 group_id: 0,
3086 is_primary: false,
3087 ..Default::default()
3088 }
3089 },
3090 DiagnosticEntry {
3091 range: Point::new(1, 13)..Point::new(1, 15),
3092 diagnostic: Diagnostic {
3093 severity: DiagnosticSeverity::HINT,
3094 message: "error 2 hint 2".to_string(),
3095 group_id: 0,
3096 is_primary: false,
3097 ..Default::default()
3098 }
3099 },
3100 DiagnosticEntry {
3101 range: Point::new(2, 8)..Point::new(2, 17),
3102 diagnostic: Diagnostic {
3103 severity: DiagnosticSeverity::ERROR,
3104 message: "error 2".to_string(),
3105 group_id: 0,
3106 is_primary: true,
3107 ..Default::default()
3108 }
3109 }
3110 ]
3111 );
3112
3113 assert_eq!(
3114 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3115 &[
3116 DiagnosticEntry {
3117 range: Point::new(1, 13)..Point::new(1, 15),
3118 diagnostic: Diagnostic {
3119 severity: DiagnosticSeverity::HINT,
3120 message: "error 2 hint 1".to_string(),
3121 group_id: 0,
3122 is_primary: false,
3123 ..Default::default()
3124 }
3125 },
3126 DiagnosticEntry {
3127 range: Point::new(1, 13)..Point::new(1, 15),
3128 diagnostic: Diagnostic {
3129 severity: DiagnosticSeverity::HINT,
3130 message: "error 2 hint 2".to_string(),
3131 group_id: 0,
3132 is_primary: false,
3133 ..Default::default()
3134 }
3135 },
3136 DiagnosticEntry {
3137 range: Point::new(2, 8)..Point::new(2, 17),
3138 diagnostic: Diagnostic {
3139 severity: DiagnosticSeverity::ERROR,
3140 message: "error 2".to_string(),
3141 group_id: 0,
3142 is_primary: true,
3143 ..Default::default()
3144 }
3145 }
3146 ]
3147 );
3148
3149 assert_eq!(
3150 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3151 &[
3152 DiagnosticEntry {
3153 range: Point::new(1, 8)..Point::new(1, 9),
3154 diagnostic: Diagnostic {
3155 severity: DiagnosticSeverity::WARNING,
3156 message: "error 1".to_string(),
3157 group_id: 1,
3158 is_primary: true,
3159 ..Default::default()
3160 }
3161 },
3162 DiagnosticEntry {
3163 range: Point::new(1, 8)..Point::new(1, 9),
3164 diagnostic: Diagnostic {
3165 severity: DiagnosticSeverity::HINT,
3166 message: "error 1 hint 1".to_string(),
3167 group_id: 1,
3168 is_primary: false,
3169 ..Default::default()
3170 }
3171 },
3172 ]
3173 );
3174}
3175
3176#[gpui::test]
3177async fn test_rename(cx: &mut gpui::TestAppContext) {
3178 init_test(cx);
3179
3180 let mut language = Language::new(
3181 LanguageConfig {
3182 name: "Rust".into(),
3183 path_suffixes: vec!["rs".to_string()],
3184 ..Default::default()
3185 },
3186 Some(tree_sitter_rust::language()),
3187 );
3188 let mut fake_servers = language
3189 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3190 capabilities: lsp::ServerCapabilities {
3191 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3192 prepare_provider: Some(true),
3193 work_done_progress_options: Default::default(),
3194 })),
3195 ..Default::default()
3196 },
3197 ..Default::default()
3198 }))
3199 .await;
3200
3201 let fs = FakeFs::new(cx.background());
3202 fs.insert_tree(
3203 "/dir",
3204 json!({
3205 "one.rs": "const ONE: usize = 1;",
3206 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3207 }),
3208 )
3209 .await;
3210
3211 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3212 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3213 let buffer = project
3214 .update(cx, |project, cx| {
3215 project.open_local_buffer("/dir/one.rs", cx)
3216 })
3217 .await
3218 .unwrap();
3219
3220 let fake_server = fake_servers.next().await.unwrap();
3221
3222 let response = project.update(cx, |project, cx| {
3223 project.prepare_rename(buffer.clone(), 7, cx)
3224 });
3225 fake_server
3226 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3227 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3228 assert_eq!(params.position, lsp::Position::new(0, 7));
3229 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3230 lsp::Position::new(0, 6),
3231 lsp::Position::new(0, 9),
3232 ))))
3233 })
3234 .next()
3235 .await
3236 .unwrap();
3237 let range = response.await.unwrap().unwrap();
3238 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3239 assert_eq!(range, 6..9);
3240
3241 let response = project.update(cx, |project, cx| {
3242 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3243 });
3244 fake_server
3245 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3246 assert_eq!(
3247 params.text_document_position.text_document.uri.as_str(),
3248 "file:///dir/one.rs"
3249 );
3250 assert_eq!(
3251 params.text_document_position.position,
3252 lsp::Position::new(0, 7)
3253 );
3254 assert_eq!(params.new_name, "THREE");
3255 Ok(Some(lsp::WorkspaceEdit {
3256 changes: Some(
3257 [
3258 (
3259 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3260 vec![lsp::TextEdit::new(
3261 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3262 "THREE".to_string(),
3263 )],
3264 ),
3265 (
3266 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3267 vec![
3268 lsp::TextEdit::new(
3269 lsp::Range::new(
3270 lsp::Position::new(0, 24),
3271 lsp::Position::new(0, 27),
3272 ),
3273 "THREE".to_string(),
3274 ),
3275 lsp::TextEdit::new(
3276 lsp::Range::new(
3277 lsp::Position::new(0, 35),
3278 lsp::Position::new(0, 38),
3279 ),
3280 "THREE".to_string(),
3281 ),
3282 ],
3283 ),
3284 ]
3285 .into_iter()
3286 .collect(),
3287 ),
3288 ..Default::default()
3289 }))
3290 })
3291 .next()
3292 .await
3293 .unwrap();
3294 let mut transaction = response.await.unwrap().0;
3295 assert_eq!(transaction.len(), 2);
3296 assert_eq!(
3297 transaction
3298 .remove_entry(&buffer)
3299 .unwrap()
3300 .0
3301 .read_with(cx, |buffer, _| buffer.text()),
3302 "const THREE: usize = 1;"
3303 );
3304 assert_eq!(
3305 transaction
3306 .into_keys()
3307 .next()
3308 .unwrap()
3309 .read_with(cx, |buffer, _| buffer.text()),
3310 "const TWO: usize = one::THREE + one::THREE;"
3311 );
3312}
3313
3314#[gpui::test]
3315async fn test_search(cx: &mut gpui::TestAppContext) {
3316 init_test(cx);
3317
3318 let fs = FakeFs::new(cx.background());
3319 fs.insert_tree(
3320 "/dir",
3321 json!({
3322 "one.rs": "const ONE: usize = 1;",
3323 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3324 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3325 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3326 }),
3327 )
3328 .await;
3329 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3330 assert_eq!(
3331 search(
3332 &project,
3333 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3334 cx
3335 )
3336 .await
3337 .unwrap(),
3338 HashMap::from_iter([
3339 ("two.rs".to_string(), vec![6..9]),
3340 ("three.rs".to_string(), vec![37..40])
3341 ])
3342 );
3343
3344 let buffer_4 = project
3345 .update(cx, |project, cx| {
3346 project.open_local_buffer("/dir/four.rs", cx)
3347 })
3348 .await
3349 .unwrap();
3350 buffer_4.update(cx, |buffer, cx| {
3351 let text = "two::TWO";
3352 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3353 });
3354
3355 assert_eq!(
3356 search(
3357 &project,
3358 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3359 cx
3360 )
3361 .await
3362 .unwrap(),
3363 HashMap::from_iter([
3364 ("two.rs".to_string(), vec![6..9]),
3365 ("three.rs".to_string(), vec![37..40]),
3366 ("four.rs".to_string(), vec![25..28, 36..39])
3367 ])
3368 );
3369}
3370
3371#[gpui::test]
3372async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3373 init_test(cx);
3374
3375 let search_query = "file";
3376
3377 let fs = FakeFs::new(cx.background());
3378 fs.insert_tree(
3379 "/dir",
3380 json!({
3381 "one.rs": r#"// Rust file one"#,
3382 "one.ts": r#"// TypeScript file one"#,
3383 "two.rs": r#"// Rust file two"#,
3384 "two.ts": r#"// TypeScript file two"#,
3385 }),
3386 )
3387 .await;
3388 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3389
3390 assert!(
3391 search(
3392 &project,
3393 SearchQuery::text(
3394 search_query,
3395 false,
3396 true,
3397 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3398 Vec::new()
3399 ),
3400 cx
3401 )
3402 .await
3403 .unwrap()
3404 .is_empty(),
3405 "If no inclusions match, no files should be returned"
3406 );
3407
3408 assert_eq!(
3409 search(
3410 &project,
3411 SearchQuery::text(
3412 search_query,
3413 false,
3414 true,
3415 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3416 Vec::new()
3417 ),
3418 cx
3419 )
3420 .await
3421 .unwrap(),
3422 HashMap::from_iter([
3423 ("one.rs".to_string(), vec![8..12]),
3424 ("two.rs".to_string(), vec![8..12]),
3425 ]),
3426 "Rust only search should give only Rust files"
3427 );
3428
3429 assert_eq!(
3430 search(
3431 &project,
3432 SearchQuery::text(
3433 search_query,
3434 false,
3435 true,
3436 vec![
3437 Glob::new("*.ts").unwrap().compile_matcher(),
3438 Glob::new("*.odd").unwrap().compile_matcher(),
3439 ],
3440 Vec::new()
3441 ),
3442 cx
3443 )
3444 .await
3445 .unwrap(),
3446 HashMap::from_iter([
3447 ("one.ts".to_string(), vec![14..18]),
3448 ("two.ts".to_string(), vec![14..18]),
3449 ]),
3450 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3451 );
3452
3453 assert_eq!(
3454 search(
3455 &project,
3456 SearchQuery::text(
3457 search_query,
3458 false,
3459 true,
3460 vec![
3461 Glob::new("*.rs").unwrap().compile_matcher(),
3462 Glob::new("*.ts").unwrap().compile_matcher(),
3463 Glob::new("*.odd").unwrap().compile_matcher(),
3464 ],
3465 Vec::new()
3466 ),
3467 cx
3468 )
3469 .await
3470 .unwrap(),
3471 HashMap::from_iter([
3472 ("one.rs".to_string(), vec![8..12]),
3473 ("one.ts".to_string(), vec![14..18]),
3474 ("two.rs".to_string(), vec![8..12]),
3475 ("two.ts".to_string(), vec![14..18]),
3476 ]),
3477 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3478 );
3479}
3480
3481#[gpui::test]
3482async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3483 init_test(cx);
3484
3485 let search_query = "file";
3486
3487 let fs = FakeFs::new(cx.background());
3488 fs.insert_tree(
3489 "/dir",
3490 json!({
3491 "one.rs": r#"// Rust file one"#,
3492 "one.ts": r#"// TypeScript file one"#,
3493 "two.rs": r#"// Rust file two"#,
3494 "two.ts": r#"// TypeScript file two"#,
3495 }),
3496 )
3497 .await;
3498 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3499
3500 assert_eq!(
3501 search(
3502 &project,
3503 SearchQuery::text(
3504 search_query,
3505 false,
3506 true,
3507 Vec::new(),
3508 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3509 ),
3510 cx
3511 )
3512 .await
3513 .unwrap(),
3514 HashMap::from_iter([
3515 ("one.rs".to_string(), vec![8..12]),
3516 ("one.ts".to_string(), vec![14..18]),
3517 ("two.rs".to_string(), vec![8..12]),
3518 ("two.ts".to_string(), vec![14..18]),
3519 ]),
3520 "If no exclusions match, all files should be returned"
3521 );
3522
3523 assert_eq!(
3524 search(
3525 &project,
3526 SearchQuery::text(
3527 search_query,
3528 false,
3529 true,
3530 Vec::new(),
3531 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3532 ),
3533 cx
3534 )
3535 .await
3536 .unwrap(),
3537 HashMap::from_iter([
3538 ("one.ts".to_string(), vec![14..18]),
3539 ("two.ts".to_string(), vec![14..18]),
3540 ]),
3541 "Rust exclusion search should give only TypeScript files"
3542 );
3543
3544 assert_eq!(
3545 search(
3546 &project,
3547 SearchQuery::text(
3548 search_query,
3549 false,
3550 true,
3551 Vec::new(),
3552 vec![
3553 Glob::new("*.ts").unwrap().compile_matcher(),
3554 Glob::new("*.odd").unwrap().compile_matcher(),
3555 ],
3556 ),
3557 cx
3558 )
3559 .await
3560 .unwrap(),
3561 HashMap::from_iter([
3562 ("one.rs".to_string(), vec![8..12]),
3563 ("two.rs".to_string(), vec![8..12]),
3564 ]),
3565 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3566 );
3567
3568 assert!(
3569 search(
3570 &project,
3571 SearchQuery::text(
3572 search_query,
3573 false,
3574 true,
3575 Vec::new(),
3576 vec![
3577 Glob::new("*.rs").unwrap().compile_matcher(),
3578 Glob::new("*.ts").unwrap().compile_matcher(),
3579 Glob::new("*.odd").unwrap().compile_matcher(),
3580 ],
3581 ),
3582 cx
3583 )
3584 .await
3585 .unwrap().is_empty(),
3586 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
3587 );
3588}
3589
3590#[gpui::test]
3591async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3592 init_test(cx);
3593
3594 let search_query = "file";
3595
3596 let fs = FakeFs::new(cx.background());
3597 fs.insert_tree(
3598 "/dir",
3599 json!({
3600 "one.rs": r#"// Rust file one"#,
3601 "one.ts": r#"// TypeScript file one"#,
3602 "two.rs": r#"// Rust file two"#,
3603 "two.ts": r#"// TypeScript file two"#,
3604 }),
3605 )
3606 .await;
3607 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3608
3609 assert!(
3610 search(
3611 &project,
3612 SearchQuery::text(
3613 search_query,
3614 false,
3615 true,
3616 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3617 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3618 ),
3619 cx
3620 )
3621 .await
3622 .unwrap()
3623 .is_empty(),
3624 "If both no exclusions and inclusions match, exclusions should win and return nothing"
3625 );
3626
3627 assert!(
3628 search(
3629 &project,
3630 SearchQuery::text(
3631 search_query,
3632 false,
3633 true,
3634 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3635 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3636 ),
3637 cx
3638 )
3639 .await
3640 .unwrap()
3641 .is_empty(),
3642 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
3643 );
3644
3645 assert!(
3646 search(
3647 &project,
3648 SearchQuery::text(
3649 search_query,
3650 false,
3651 true,
3652 vec![
3653 Glob::new("*.ts").unwrap().compile_matcher(),
3654 Glob::new("*.odd").unwrap().compile_matcher()
3655 ],
3656 vec![
3657 Glob::new("*.ts").unwrap().compile_matcher(),
3658 Glob::new("*.odd").unwrap().compile_matcher()
3659 ],
3660 ),
3661 cx
3662 )
3663 .await
3664 .unwrap()
3665 .is_empty(),
3666 "Non-matching inclusions and exclusions should not change that."
3667 );
3668
3669 assert_eq!(
3670 search(
3671 &project,
3672 SearchQuery::text(
3673 search_query,
3674 false,
3675 true,
3676 vec![
3677 Glob::new("*.ts").unwrap().compile_matcher(),
3678 Glob::new("*.odd").unwrap().compile_matcher()
3679 ],
3680 vec![
3681 Glob::new("*.rs").unwrap().compile_matcher(),
3682 Glob::new("*.odd").unwrap().compile_matcher()
3683 ],
3684 ),
3685 cx
3686 )
3687 .await
3688 .unwrap(),
3689 HashMap::from_iter([
3690 ("one.ts".to_string(), vec![14..18]),
3691 ("two.ts".to_string(), vec![14..18]),
3692 ]),
3693 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3694 );
3695}
3696
3697async fn search(
3698 project: &ModelHandle<Project>,
3699 query: SearchQuery,
3700 cx: &mut gpui::TestAppContext,
3701) -> Result<HashMap<String, Vec<Range<usize>>>> {
3702 let results = project
3703 .update(cx, |project, cx| project.search(query, cx))
3704 .await?;
3705
3706 Ok(results
3707 .into_iter()
3708 .map(|(buffer, ranges)| {
3709 buffer.read_with(cx, |buffer, _| {
3710 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3711 let ranges = ranges
3712 .into_iter()
3713 .map(|range| range.to_offset(buffer))
3714 .collect::<Vec<_>>();
3715 (path, ranges)
3716 })
3717 })
3718 .collect())
3719}
3720
3721fn init_test(cx: &mut gpui::TestAppContext) {
3722 cx.foreground().forbid_parking();
3723
3724 cx.update(|cx| {
3725 cx.set_global(SettingsStore::test(cx));
3726 language::init(cx);
3727 Project::init_settings(cx);
3728 });
3729}