1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use globset::Glob;
5use gpui::{executor::Deterministic, test::subscribe, AppContext};
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
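// `ctor` runs this function when the test binary starts, before any test executes, so the
// logger is initialized exactly once and only when `RUST_LOG` is set.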
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 init_test(cx);
30 cx.foreground().allow_parking();
31
32 let dir = temp_tree(json!({
33 "root": {
34 "apple": "",
35 "banana": {
36 "carrot": {
37 "date": "",
38 "endive": "",
39 }
40 },
41 "fennel": {
42 "grape": "",
43 }
44 }
45 }));
46
47 let root_link_path = dir.path().join("root_link");
48 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
49 unix::fs::symlink(
50 &dir.path().join("root/fennel"),
51 &dir.path().join("root/finnochio"),
52 )
53 .unwrap();
54
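    // This test uses the real filesystem (and allows parking above) because it creates
    // actual symlinks on disk via `std::os::unix::fs::symlink`.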
55 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
56 project.read_with(cx, |project, cx| {
57 let tree = project.worktrees(cx).next().unwrap().read(cx);
58 assert_eq!(tree.file_count(), 5);
59 assert_eq!(
60 tree.inode_for_path("fennel/grape"),
61 tree.inode_for_path("finnochio/grape")
62 );
63 });
64}
65
66#[gpui::test]
67async fn test_managing_project_specific_settings(
68 deterministic: Arc<Deterministic>,
69 cx: &mut gpui::TestAppContext,
70) {
71 init_test(cx);
72
73 let fs = FakeFs::new(cx.background());
74 fs.insert_tree(
75 "/the-root",
76 json!({
77 ".zed": {
78 "settings.json": r#"{ "tab_size": 8 }"#
79 },
80 "a": {
81 "a.rs": "fn a() {\n A\n}"
82 },
83 "b": {
84 ".zed": {
85 "settings.json": r#"{ "tab_size": 2 }"#
86 },
87 "b.rs": "fn b() {\n B\n}"
88 }
89 }),
90 )
91 .await;
92
93 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
94 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
95
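    // Let the worktree scan finish and the `.zed/settings.json` files be loaded before
    // reading the resolved language settings below.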
96 deterministic.run_until_parked();
97 cx.read(|cx| {
98 let tree = worktree.read(cx);
99
100 let settings_a = language_settings(
101 None,
102 Some(&File::for_entry(
103 tree.entry_for_path("a/a.rs").unwrap().clone(),
104 worktree.clone(),
105 )),
106 cx,
107 );
108 let settings_b = language_settings(
109 None,
110 Some(&File::for_entry(
111 tree.entry_for_path("b/b.rs").unwrap().clone(),
112 worktree.clone(),
113 )),
114 cx,
115 );
116
117 assert_eq!(settings_a.tab_size.get(), 8);
118 assert_eq!(settings_b.tab_size.get(), 2);
119 });
120}
121
122#[gpui::test]
123async fn test_managing_language_servers(
124 deterministic: Arc<Deterministic>,
125 cx: &mut gpui::TestAppContext,
126) {
127 init_test(cx);
128
129 let mut rust_language = Language::new(
130 LanguageConfig {
131 name: "Rust".into(),
132 path_suffixes: vec!["rs".to_string()],
133 ..Default::default()
134 },
135 Some(tree_sitter_rust::language()),
136 );
137 let mut json_language = Language::new(
138 LanguageConfig {
139 name: "JSON".into(),
140 path_suffixes: vec!["json".to_string()],
141 ..Default::default()
142 },
143 None,
144 );
145 let mut fake_rust_servers = rust_language
146 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
147 name: "the-rust-language-server",
148 capabilities: lsp::ServerCapabilities {
149 completion_provider: Some(lsp::CompletionOptions {
150 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
151 ..Default::default()
152 }),
153 ..Default::default()
154 },
155 ..Default::default()
156 }))
157 .await;
158 let mut fake_json_servers = json_language
159 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
160 name: "the-json-language-server",
161 capabilities: lsp::ServerCapabilities {
162 completion_provider: Some(lsp::CompletionOptions {
163 trigger_characters: Some(vec![":".to_string()]),
164 ..Default::default()
165 }),
166 ..Default::default()
167 },
168 ..Default::default()
169 }))
170 .await;
171
172 let fs = FakeFs::new(cx.background());
173 fs.insert_tree(
174 "/the-root",
175 json!({
176 "test.rs": "const A: i32 = 1;",
177 "test2.rs": "",
178 "Cargo.toml": "a = 1",
179 "package.json": "{\"a\": 1}",
180 }),
181 )
182 .await;
183
184 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
185
186 // Open a buffer without an associated language server.
187 let toml_buffer = project
188 .update(cx, |project, cx| {
189 project.open_local_buffer("/the-root/Cargo.toml", cx)
190 })
191 .await
192 .unwrap();
193
194 // Open a buffer with an associated language server before the language for it has been loaded.
195 let rust_buffer = project
196 .update(cx, |project, cx| {
197 project.open_local_buffer("/the-root/test.rs", cx)
198 })
199 .await
200 .unwrap();
201 rust_buffer.read_with(cx, |buffer, _| {
202 assert_eq!(buffer.language().map(|l| l.name()), None);
203 });
204
205 // Now we add the languages to the project, and ensure they get assigned to all
206 // the relevant open buffers.
207 project.update(cx, |project, _| {
208 project.languages.add(Arc::new(json_language));
209 project.languages.add(Arc::new(rust_language));
210 });
211 deterministic.run_until_parked();
212 rust_buffer.read_with(cx, |buffer, _| {
213 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
214 });
215
216 // A Rust language server is started up, and it is notified about the open Rust file.
217 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
218 assert_eq!(
219 fake_rust_server
220 .receive_notification::<lsp::notification::DidOpenTextDocument>()
221 .await
222 .text_document,
223 lsp::TextDocumentItem {
224 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
225 version: 0,
226 text: "const A: i32 = 1;".to_string(),
227 language_id: Default::default()
228 }
229 );
230
231 // The buffer is configured based on the language server's capabilities.
232 rust_buffer.read_with(cx, |buffer, _| {
233 assert_eq!(
234 buffer.completion_triggers(),
235 &[".".to_string(), "::".to_string()]
236 );
237 });
238 toml_buffer.read_with(cx, |buffer, _| {
239 assert!(buffer.completion_triggers().is_empty());
240 });
241
242 // Edit a buffer. The changes are reported to the language server.
243 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
244 assert_eq!(
245 fake_rust_server
246 .receive_notification::<lsp::notification::DidChangeTextDocument>()
247 .await
248 .text_document,
249 lsp::VersionedTextDocumentIdentifier::new(
250 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
251 1
252 )
253 );
254
255 // Open a third buffer with a different associated language server.
256 let json_buffer = project
257 .update(cx, |project, cx| {
258 project.open_local_buffer("/the-root/package.json", cx)
259 })
260 .await
261 .unwrap();
262
263 // A JSON language server is started up and is notified only about the JSON buffer.
264 let mut fake_json_server = fake_json_servers.next().await.unwrap();
265 assert_eq!(
266 fake_json_server
267 .receive_notification::<lsp::notification::DidOpenTextDocument>()
268 .await
269 .text_document,
270 lsp::TextDocumentItem {
271 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
272 version: 0,
273 text: "{\"a\": 1}".to_string(),
274 language_id: Default::default()
275 }
276 );
277
278 // This buffer is configured based on the second language server's
279 // capabilities.
280 json_buffer.read_with(cx, |buffer, _| {
281 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
282 });
283
284 // When opening another buffer whose language server is already running,
285 // it is also configured based on the existing language server's capabilities.
286 let rust_buffer2 = project
287 .update(cx, |project, cx| {
288 project.open_local_buffer("/the-root/test2.rs", cx)
289 })
290 .await
291 .unwrap();
292 rust_buffer2.read_with(cx, |buffer, _| {
293 assert_eq!(
294 buffer.completion_triggers(),
295 &[".".to_string(), "::".to_string()]
296 );
297 });
298
299 // Changes are reported only to servers matching the buffer's language.
300 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
301 rust_buffer2.update(cx, |buffer, cx| {
302 buffer.edit([(0..0, "let x = 1;")], None, cx)
303 });
304 assert_eq!(
305 fake_rust_server
306 .receive_notification::<lsp::notification::DidChangeTextDocument>()
307 .await
308 .text_document,
309 lsp::VersionedTextDocumentIdentifier::new(
310 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
311 1
312 )
313 );
314
315 // Save notifications are reported to all servers.
316 project
317 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
318 .await
319 .unwrap();
320 assert_eq!(
321 fake_rust_server
322 .receive_notification::<lsp::notification::DidSaveTextDocument>()
323 .await
324 .text_document,
325 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
326 );
327 assert_eq!(
328 fake_json_server
329 .receive_notification::<lsp::notification::DidSaveTextDocument>()
330 .await
331 .text_document,
332 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
333 );
334
335 // Renames are reported only to servers matching the buffer's language.
336 fs.rename(
337 Path::new("/the-root/test2.rs"),
338 Path::new("/the-root/test3.rs"),
339 Default::default(),
340 )
341 .await
342 .unwrap();
343 assert_eq!(
344 fake_rust_server
345 .receive_notification::<lsp::notification::DidCloseTextDocument>()
346 .await
347 .text_document,
348 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
349 );
350 assert_eq!(
351 fake_rust_server
352 .receive_notification::<lsp::notification::DidOpenTextDocument>()
353 .await
354 .text_document,
355 lsp::TextDocumentItem {
356 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
357 version: 0,
358 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
359 language_id: Default::default()
360 },
361 );
362
363 rust_buffer2.update(cx, |buffer, cx| {
364 buffer.update_diagnostics(
365 LanguageServerId(0),
366 DiagnosticSet::from_sorted_entries(
367 vec![DiagnosticEntry {
368 diagnostic: Default::default(),
369 range: Anchor::MIN..Anchor::MAX,
370 }],
371 &buffer.snapshot(),
372 ),
373 cx,
374 );
375 assert_eq!(
376 buffer
377 .snapshot()
378 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
379 .count(),
380 1
381 );
382 });
383
384 // When the rename changes the extension of the file, the buffer gets closed on the old
385 // language server and gets opened on the new one.
386 fs.rename(
387 Path::new("/the-root/test3.rs"),
388 Path::new("/the-root/test3.json"),
389 Default::default(),
390 )
391 .await
392 .unwrap();
393 assert_eq!(
394 fake_rust_server
395 .receive_notification::<lsp::notification::DidCloseTextDocument>()
396 .await
397 .text_document,
398 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
399 );
400 assert_eq!(
401 fake_json_server
402 .receive_notification::<lsp::notification::DidOpenTextDocument>()
403 .await
404 .text_document,
405 lsp::TextDocumentItem {
406 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
407 version: 0,
408 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
409 language_id: Default::default()
410 },
411 );
412
413 // We clear the diagnostics, since the language has changed.
414 rust_buffer2.read_with(cx, |buffer, _| {
415 assert_eq!(
416 buffer
417 .snapshot()
418 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
419 .count(),
420 0
421 );
422 });
423
424 // The renamed file's version resets after switching to the new language server.
425 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
426 assert_eq!(
427 fake_json_server
428 .receive_notification::<lsp::notification::DidChangeTextDocument>()
429 .await
430 .text_document,
431 lsp::VersionedTextDocumentIdentifier::new(
432 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
433 1
434 )
435 );
436
437 // Restart both language servers
438 project.update(cx, |project, cx| {
439 project.restart_language_servers_for_buffers(
440 vec![rust_buffer.clone(), json_buffer.clone()],
441 cx,
442 );
443 });
444
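    // The old servers receive `shutdown` requests as part of the restart; their replacements
    // are then picked up from the fake-server streams below.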
445 let mut rust_shutdown_requests = fake_rust_server
446 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
447 let mut json_shutdown_requests = fake_json_server
448 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
449 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
450
451 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
452 let mut fake_json_server = fake_json_servers.next().await.unwrap();
453
454 // Ensure the Rust document is reopened in the new Rust language server
455 assert_eq!(
456 fake_rust_server
457 .receive_notification::<lsp::notification::DidOpenTextDocument>()
458 .await
459 .text_document,
460 lsp::TextDocumentItem {
461 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
462 version: 0,
463 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
464 language_id: Default::default()
465 }
466 );
467
468 // Ensure the JSON documents are reopened in the new JSON language server
469 assert_set_eq!(
470 [
471 fake_json_server
472 .receive_notification::<lsp::notification::DidOpenTextDocument>()
473 .await
474 .text_document,
475 fake_json_server
476 .receive_notification::<lsp::notification::DidOpenTextDocument>()
477 .await
478 .text_document,
479 ],
480 [
481 lsp::TextDocumentItem {
482 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
483 version: 0,
484 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
485 language_id: Default::default()
486 },
487 lsp::TextDocumentItem {
488 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
489 version: 0,
490 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
491 language_id: Default::default()
492 }
493 ]
494 );
495
496 // Close notifications are reported only to servers matching the buffer's language.
497 cx.update(|_| drop(json_buffer));
498 let close_message = lsp::DidCloseTextDocumentParams {
499 text_document: lsp::TextDocumentIdentifier::new(
500 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
501 ),
502 };
503 assert_eq!(
504 fake_json_server
505 .receive_notification::<lsp::notification::DidCloseTextDocument>()
506 .await,
507 close_message,
508 );
509}
510
511#[gpui::test]
512async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
513 init_test(cx);
514
515 let mut language = Language::new(
516 LanguageConfig {
517 name: "Rust".into(),
518 path_suffixes: vec!["rs".to_string()],
519 ..Default::default()
520 },
521 Some(tree_sitter_rust::language()),
522 );
523 let mut fake_servers = language
524 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
525 name: "the-language-server",
526 ..Default::default()
527 }))
528 .await;
529
530 let fs = FakeFs::new(cx.background());
531 fs.insert_tree(
532 "/the-root",
533 json!({
534 "a.rs": "",
535 "b.rs": "",
536 }),
537 )
538 .await;
539
540 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
541 project.update(cx, |project, _| {
542 project.languages.add(Arc::new(language));
543 });
544 cx.foreground().run_until_parked();
545
546 // Start the language server by opening a buffer with a compatible file extension.
547 let _buffer = project
548 .update(cx, |project, cx| {
549 project.open_local_buffer("/the-root/a.rs", cx)
550 })
551 .await
552 .unwrap();
553
554 // Keep track of the FS events reported to the language server.
555 let fake_server = fake_servers.next().await.unwrap();
556 let file_changes = Arc::new(Mutex::new(Vec::new()));
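    // The server dynamically registers a `workspace/didChangeWatchedFiles` watcher; the glob
    // below only matches `.rs` and `.c` files directly under `/the-root`.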
557 fake_server
558 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
559 registrations: vec![lsp::Registration {
560 id: Default::default(),
561 method: "workspace/didChangeWatchedFiles".to_string(),
562 register_options: serde_json::to_value(
563 lsp::DidChangeWatchedFilesRegistrationOptions {
564 watchers: vec![lsp::FileSystemWatcher {
565 glob_pattern: "/the-root/*.{rs,c}".to_string(),
566 kind: None,
567 }],
568 },
569 )
570 .ok(),
571 }],
572 })
573 .await
574 .unwrap();
575 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
576 let file_changes = file_changes.clone();
577 move |params, _| {
578 let mut file_changes = file_changes.lock();
579 file_changes.extend(params.changes);
580 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
581 }
582 });
583
584 cx.foreground().run_until_parked();
585 assert_eq!(file_changes.lock().len(), 0);
586
587 // Perform some file system mutations, two of which match the watched patterns,
588 // and one of which does not.
589 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
590 .await
591 .unwrap();
592 fs.create_file("/the-root/d.txt".as_ref(), Default::default())
593 .await
594 .unwrap();
595 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
596 .await
597 .unwrap();
598
599 // The language server receives events for the FS mutations that match its watch patterns.
600 cx.foreground().run_until_parked();
601 assert_eq!(
602 &*file_changes.lock(),
603 &[
604 lsp::FileEvent {
605 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
606 typ: lsp::FileChangeType::DELETED,
607 },
608 lsp::FileEvent {
609 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
610 typ: lsp::FileChangeType::CREATED,
611 },
612 ]
613 );
614}
615
616#[gpui::test]
617async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
618 init_test(cx);
619
620 let fs = FakeFs::new(cx.background());
621 fs.insert_tree(
622 "/dir",
623 json!({
624 "a.rs": "let a = 1;",
625 "b.rs": "let b = 2;"
626 }),
627 )
628 .await;
629
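    // Passing two file paths creates two single-file worktrees, one per file.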
630 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
631
632 let buffer_a = project
633 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
634 .await
635 .unwrap();
636 let buffer_b = project
637 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
638 .await
639 .unwrap();
640
641 project.update(cx, |project, cx| {
642 project
643 .update_diagnostics(
644 LanguageServerId(0),
645 lsp::PublishDiagnosticsParams {
646 uri: Url::from_file_path("/dir/a.rs").unwrap(),
647 version: None,
648 diagnostics: vec![lsp::Diagnostic {
649 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
650 severity: Some(lsp::DiagnosticSeverity::ERROR),
651 message: "error 1".to_string(),
652 ..Default::default()
653 }],
654 },
655 &[],
656 cx,
657 )
658 .unwrap();
659 project
660 .update_diagnostics(
661 LanguageServerId(0),
662 lsp::PublishDiagnosticsParams {
663 uri: Url::from_file_path("/dir/b.rs").unwrap(),
664 version: None,
665 diagnostics: vec![lsp::Diagnostic {
666 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
667 severity: Some(lsp::DiagnosticSeverity::WARNING),
668 message: "error 2".to_string(),
669 ..Default::default()
670 }],
671 },
672 &[],
673 cx,
674 )
675 .unwrap();
676 });
677
678 buffer_a.read_with(cx, |buffer, _| {
679 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
680 assert_eq!(
681 chunks
682 .iter()
683 .map(|(s, d)| (s.as_str(), *d))
684 .collect::<Vec<_>>(),
685 &[
686 ("let ", None),
687 ("a", Some(DiagnosticSeverity::ERROR)),
688 (" = 1;", None),
689 ]
690 );
691 });
692 buffer_b.read_with(cx, |buffer, _| {
693 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
694 assert_eq!(
695 chunks
696 .iter()
697 .map(|(s, d)| (s.as_str(), *d))
698 .collect::<Vec<_>>(),
699 &[
700 ("let ", None),
701 ("b", Some(DiagnosticSeverity::WARNING)),
702 (" = 2;", None),
703 ]
704 );
705 });
706}
707
708#[gpui::test]
709async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
710 init_test(cx);
711
712 let fs = FakeFs::new(cx.background());
713 fs.insert_tree(
714 "/root",
715 json!({
716 "dir": {
717 "a.rs": "let a = 1;",
718 },
719 "other.rs": "let b = c;"
720 }),
721 )
722 .await;
723
724 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
725
726 let (worktree, _) = project
727 .update(cx, |project, cx| {
728 project.find_or_create_local_worktree("/root/other.rs", false, cx)
729 })
730 .await
731 .unwrap();
732 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
733
734 project.update(cx, |project, cx| {
735 project
736 .update_diagnostics(
737 LanguageServerId(0),
738 lsp::PublishDiagnosticsParams {
739 uri: Url::from_file_path("/root/other.rs").unwrap(),
740 version: None,
741 diagnostics: vec![lsp::Diagnostic {
742 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
743 severity: Some(lsp::DiagnosticSeverity::ERROR),
744 message: "unknown variable 'c'".to_string(),
745 ..Default::default()
746 }],
747 },
748 &[],
749 cx,
750 )
751 .unwrap();
752 });
753
754 let buffer = project
755 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
756 .await
757 .unwrap();
758 buffer.read_with(cx, |buffer, _| {
759 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
760 assert_eq!(
761 chunks
762 .iter()
763 .map(|(s, d)| (s.as_str(), *d))
764 .collect::<Vec<_>>(),
765 &[
766 ("let b = ", None),
767 ("c", Some(DiagnosticSeverity::ERROR)),
768 (";", None),
769 ]
770 );
771 });
772
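    // The worktree above was created as non-visible (the `false` argument), so its
    // diagnostics appear in the buffer but are excluded from the project-wide summaries.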
773 project.read_with(cx, |project, cx| {
774 assert_eq!(project.diagnostic_summaries(cx).next(), None);
775 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
776 });
777}
778
779#[gpui::test]
780async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
781 init_test(cx);
782
783 let progress_token = "the-progress-token";
784 let mut language = Language::new(
785 LanguageConfig {
786 name: "Rust".into(),
787 path_suffixes: vec!["rs".to_string()],
788 ..Default::default()
789 },
790 Some(tree_sitter_rust::language()),
791 );
792 let mut fake_servers = language
793 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
794 disk_based_diagnostics_progress_token: Some(progress_token.into()),
795 disk_based_diagnostics_sources: vec!["disk".into()],
796 ..Default::default()
797 }))
798 .await;
799
800 let fs = FakeFs::new(cx.background());
801 fs.insert_tree(
802 "/dir",
803 json!({
804 "a.rs": "fn a() { A }",
805 "b.rs": "const y: i32 = 1",
806 }),
807 )
808 .await;
809
810 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
811 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
812 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
813
814 // Cause the worktree to start the fake language server
815 let _buffer = project
816 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
817 .await
818 .unwrap();
819
820 let mut events = subscribe(&project, cx);
821
822 let fake_server = fake_servers.next().await.unwrap();
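    // Progress reported with the adapter's `disk_based_diagnostics_progress_token` is surfaced
    // as the DiskBasedDiagnosticsStarted/Finished events asserted below.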
823 fake_server
824 .start_progress(format!("{}/0", progress_token))
825 .await;
826 assert_eq!(
827 events.next().await.unwrap(),
828 Event::DiskBasedDiagnosticsStarted {
829 language_server_id: LanguageServerId(0),
830 }
831 );
832
833 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
834 uri: Url::from_file_path("/dir/a.rs").unwrap(),
835 version: None,
836 diagnostics: vec![lsp::Diagnostic {
837 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
838 severity: Some(lsp::DiagnosticSeverity::ERROR),
839 message: "undefined variable 'A'".to_string(),
840 ..Default::default()
841 }],
842 });
843 assert_eq!(
844 events.next().await.unwrap(),
845 Event::DiagnosticsUpdated {
846 language_server_id: LanguageServerId(0),
847 path: (worktree_id, Path::new("a.rs")).into()
848 }
849 );
850
851 fake_server.end_progress(format!("{}/0", progress_token));
852 assert_eq!(
853 events.next().await.unwrap(),
854 Event::DiskBasedDiagnosticsFinished {
855 language_server_id: LanguageServerId(0)
856 }
857 );
858
859 let buffer = project
860 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
861 .await
862 .unwrap();
863
864 buffer.read_with(cx, |buffer, _| {
865 let snapshot = buffer.snapshot();
866 let diagnostics = snapshot
867 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
868 .collect::<Vec<_>>();
869 assert_eq!(
870 diagnostics,
871 &[DiagnosticEntry {
872 range: Point::new(0, 9)..Point::new(0, 10),
873 diagnostic: Diagnostic {
874 severity: lsp::DiagnosticSeverity::ERROR,
875 message: "undefined variable 'A'".to_string(),
876 group_id: 0,
877 is_primary: true,
878 ..Default::default()
879 }
880 }]
881 )
882 });
883
884 // Ensure that publishing empty diagnostics twice results in only one update event.
885 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
886 uri: Url::from_file_path("/dir/a.rs").unwrap(),
887 version: None,
888 diagnostics: Default::default(),
889 });
890 assert_eq!(
891 events.next().await.unwrap(),
892 Event::DiagnosticsUpdated {
893 language_server_id: LanguageServerId(0),
894 path: (worktree_id, Path::new("a.rs")).into()
895 }
896 );
897
898 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
899 uri: Url::from_file_path("/dir/a.rs").unwrap(),
900 version: None,
901 diagnostics: Default::default(),
902 });
903 cx.foreground().run_until_parked();
904 assert_eq!(futures::poll!(events.next()), Poll::Pending);
905}
906
907#[gpui::test]
908async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
909 init_test(cx);
910
911 let progress_token = "the-progress-token";
912 let mut language = Language::new(
913 LanguageConfig {
914 path_suffixes: vec!["rs".to_string()],
915 ..Default::default()
916 },
917 None,
918 );
919 let mut fake_servers = language
920 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
921 disk_based_diagnostics_sources: vec!["disk".into()],
922 disk_based_diagnostics_progress_token: Some(progress_token.into()),
923 ..Default::default()
924 }))
925 .await;
926
927 let fs = FakeFs::new(cx.background());
928 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
929
930 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
931 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
932
933 let buffer = project
934 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
935 .await
936 .unwrap();
937
938 // Simulate diagnostics starting to update.
939 let fake_server = fake_servers.next().await.unwrap();
940 fake_server.start_progress(progress_token).await;
941
942 // Restart the server before the diagnostics finish updating.
943 project.update(cx, |project, cx| {
944 project.restart_language_servers_for_buffers([buffer], cx);
945 });
946 let mut events = subscribe(&project, cx);
947
948 // Simulate the newly started server sending more diagnostics.
949 let fake_server = fake_servers.next().await.unwrap();
950 fake_server.start_progress(progress_token).await;
951 assert_eq!(
952 events.next().await.unwrap(),
953 Event::DiskBasedDiagnosticsStarted {
954 language_server_id: LanguageServerId(1)
955 }
956 );
957 project.read_with(cx, |project, _| {
958 assert_eq!(
959 project
960 .language_servers_running_disk_based_diagnostics()
961 .collect::<Vec<_>>(),
962 [LanguageServerId(1)]
963 );
964 });
965
966 // All diagnostics are considered done, despite the old server's diagnostic
967 // task never completing.
968 fake_server.end_progress(progress_token);
969 assert_eq!(
970 events.next().await.unwrap(),
971 Event::DiskBasedDiagnosticsFinished {
972 language_server_id: LanguageServerId(1)
973 }
974 );
975 project.read_with(cx, |project, _| {
976 assert_eq!(
977 project
978 .language_servers_running_disk_based_diagnostics()
979 .collect::<Vec<_>>(),
980 [LanguageServerId(0); 0]
981 );
982 });
983}
984
985#[gpui::test]
986async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
987 init_test(cx);
988
989 let mut language = Language::new(
990 LanguageConfig {
991 path_suffixes: vec!["rs".to_string()],
992 ..Default::default()
993 },
994 None,
995 );
996 let mut fake_servers = language
997 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
998 ..Default::default()
999 }))
1000 .await;
1001
1002 let fs = FakeFs::new(cx.background());
1003 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1004
1005 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1006 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1007
1008 let buffer = project
1009 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1010 .await
1011 .unwrap();
1012
1013 // Publish diagnostics
1014 let fake_server = fake_servers.next().await.unwrap();
1015 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1016 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1017 version: None,
1018 diagnostics: vec![lsp::Diagnostic {
1019 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1020 severity: Some(lsp::DiagnosticSeverity::ERROR),
1021 message: "the message".to_string(),
1022 ..Default::default()
1023 }],
1024 });
1025
1026 cx.foreground().run_until_parked();
1027 buffer.read_with(cx, |buffer, _| {
1028 assert_eq!(
1029 buffer
1030 .snapshot()
1031 .diagnostics_in_range::<_, usize>(0..1, false)
1032 .map(|entry| entry.diagnostic.message.clone())
1033 .collect::<Vec<_>>(),
1034 ["the message".to_string()]
1035 );
1036 });
1037 project.read_with(cx, |project, cx| {
1038 assert_eq!(
1039 project.diagnostic_summary(cx),
1040 DiagnosticSummary {
1041 error_count: 1,
1042 warning_count: 0,
1043 }
1044 );
1045 });
1046
1047 project.update(cx, |project, cx| {
1048 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1049 });
1050
1051 // The diagnostics are cleared.
1052 cx.foreground().run_until_parked();
1053 buffer.read_with(cx, |buffer, _| {
1054 assert_eq!(
1055 buffer
1056 .snapshot()
1057 .diagnostics_in_range::<_, usize>(0..1, false)
1058 .map(|entry| entry.diagnostic.message.clone())
1059 .collect::<Vec<_>>(),
1060 Vec::<String>::new(),
1061 );
1062 });
1063 project.read_with(cx, |project, cx| {
1064 assert_eq!(
1065 project.diagnostic_summary(cx),
1066 DiagnosticSummary {
1067 error_count: 0,
1068 warning_count: 0,
1069 }
1070 );
1071 });
1072}
1073
1074#[gpui::test]
1075async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1076 init_test(cx);
1077
1078 let mut language = Language::new(
1079 LanguageConfig {
1080 path_suffixes: vec!["rs".to_string()],
1081 ..Default::default()
1082 },
1083 None,
1084 );
1085 let mut fake_servers = language
1086 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1087 name: "the-lsp",
1088 ..Default::default()
1089 }))
1090 .await;
1091
1092 let fs = FakeFs::new(cx.background());
1093 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1094
1095 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1096 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1097
1098 let buffer = project
1099 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1100 .await
1101 .unwrap();
1102
1103 // Before restarting the server, report diagnostics with an unknown buffer version.
1104 let fake_server = fake_servers.next().await.unwrap();
1105 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1106 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1107 version: Some(10000),
1108 diagnostics: Vec::new(),
1109 });
1110 cx.foreground().run_until_parked();
1111
1112 project.update(cx, |project, cx| {
1113 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1114 });
1115 let mut fake_server = fake_servers.next().await.unwrap();
1116 let notification = fake_server
1117 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1118 .await
1119 .text_document;
1120 assert_eq!(notification.version, 0);
1121}
1122
1123#[gpui::test]
1124async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1125 init_test(cx);
1126
1127 let mut rust = Language::new(
1128 LanguageConfig {
1129 name: Arc::from("Rust"),
1130 path_suffixes: vec!["rs".to_string()],
1131 ..Default::default()
1132 },
1133 None,
1134 );
1135 let mut fake_rust_servers = rust
1136 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1137 name: "rust-lsp",
1138 ..Default::default()
1139 }))
1140 .await;
1141 let mut js = Language::new(
1142 LanguageConfig {
1143 name: Arc::from("JavaScript"),
1144 path_suffixes: vec!["js".to_string()],
1145 ..Default::default()
1146 },
1147 None,
1148 );
1149 let mut fake_js_servers = js
1150 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1151 name: "js-lsp",
1152 ..Default::default()
1153 }))
1154 .await;
1155
1156 let fs = FakeFs::new(cx.background());
1157 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1158 .await;
1159
1160 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1161 project.update(cx, |project, _| {
1162 project.languages.add(Arc::new(rust));
1163 project.languages.add(Arc::new(js));
1164 });
1165
1166 let _rs_buffer = project
1167 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1168 .await
1169 .unwrap();
1170 let _js_buffer = project
1171 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1172 .await
1173 .unwrap();
1174
1175 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1176 assert_eq!(
1177 fake_rust_server_1
1178 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1179 .await
1180 .text_document
1181 .uri
1182 .as_str(),
1183 "file:///dir/a.rs"
1184 );
1185
1186 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1187 assert_eq!(
1188 fake_js_server
1189 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1190 .await
1191 .text_document
1192 .uri
1193 .as_str(),
1194 "file:///dir/b.js"
1195 );
1196
1197 // Disable the Rust language server, ensuring that only that server gets stopped.
1198 cx.update(|cx| {
1199 cx.update_global(|settings: &mut SettingsStore, cx| {
1200 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1201 settings.languages.insert(
1202 Arc::from("Rust"),
1203 LanguageSettingsContent {
1204 enable_language_server: Some(false),
1205 ..Default::default()
1206 },
1207 );
1208 });
1209 })
1210 });
1211 fake_rust_server_1
1212 .receive_notification::<lsp::notification::Exit>()
1213 .await;
1214
1215 // Enable Rust and disable JavaScript language servers, ensuring that the
1216 // former gets started again and that the latter stops.
1217 cx.update(|cx| {
1218 cx.update_global(|settings: &mut SettingsStore, cx| {
1219 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1220 settings.languages.insert(
1221 Arc::from("Rust"),
1222 LanguageSettingsContent {
1223 enable_language_server: Some(true),
1224 ..Default::default()
1225 },
1226 );
1227 settings.languages.insert(
1228 Arc::from("JavaScript"),
1229 LanguageSettingsContent {
1230 enable_language_server: Some(false),
1231 ..Default::default()
1232 },
1233 );
1234 });
1235 })
1236 });
1237 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1238 assert_eq!(
1239 fake_rust_server_2
1240 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1241 .await
1242 .text_document
1243 .uri
1244 .as_str(),
1245 "file:///dir/a.rs"
1246 );
1247 fake_js_server
1248 .receive_notification::<lsp::notification::Exit>()
1249 .await;
1250}
1251
1252#[gpui::test(iterations = 3)]
1253async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1254 init_test(cx);
1255
1256 let mut language = Language::new(
1257 LanguageConfig {
1258 name: "Rust".into(),
1259 path_suffixes: vec!["rs".to_string()],
1260 ..Default::default()
1261 },
1262 Some(tree_sitter_rust::language()),
1263 );
1264 let mut fake_servers = language
1265 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1266 disk_based_diagnostics_sources: vec!["disk".into()],
1267 ..Default::default()
1268 }))
1269 .await;
1270
1271 let text = "
1272 fn a() { A }
1273 fn b() { BB }
1274 fn c() { CCC }
1275 "
1276 .unindent();
1277
1278 let fs = FakeFs::new(cx.background());
1279 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1280
1281 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1282 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1283
1284 let buffer = project
1285 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1286 .await
1287 .unwrap();
1288
1289 let mut fake_server = fake_servers.next().await.unwrap();
1290 let open_notification = fake_server
1291 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1292 .await;
1293
1294 // Edit the buffer, moving the content down
1295 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1296 let change_notification_1 = fake_server
1297 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1298 .await;
1299 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1300
1301 // Report some diagnostics for the initial version of the buffer
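    // (the version from `open_notification`); their ranges must be translated through the
    // edit made above before they land in the buffer.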
1302 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1303 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1304 version: Some(open_notification.text_document.version),
1305 diagnostics: vec![
1306 lsp::Diagnostic {
1307 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1308 severity: Some(DiagnosticSeverity::ERROR),
1309 message: "undefined variable 'A'".to_string(),
1310 source: Some("disk".to_string()),
1311 ..Default::default()
1312 },
1313 lsp::Diagnostic {
1314 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1315 severity: Some(DiagnosticSeverity::ERROR),
1316 message: "undefined variable 'BB'".to_string(),
1317 source: Some("disk".to_string()),
1318 ..Default::default()
1319 },
1320 lsp::Diagnostic {
1321 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1322 severity: Some(DiagnosticSeverity::ERROR),
1323 source: Some("disk".to_string()),
1324 message: "undefined variable 'CCC'".to_string(),
1325 ..Default::default()
1326 },
1327 ],
1328 });
1329
1330 // The diagnostics have moved down since they were created.
1331 buffer.next_notification(cx).await;
1332 cx.foreground().run_until_parked();
1333 buffer.read_with(cx, |buffer, _| {
1334 assert_eq!(
1335 buffer
1336 .snapshot()
1337 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1338 .collect::<Vec<_>>(),
1339 &[
1340 DiagnosticEntry {
1341 range: Point::new(3, 9)..Point::new(3, 11),
1342 diagnostic: Diagnostic {
1343 source: Some("disk".into()),
1344 severity: DiagnosticSeverity::ERROR,
1345 message: "undefined variable 'BB'".to_string(),
1346 is_disk_based: true,
1347 group_id: 1,
1348 is_primary: true,
1349 ..Default::default()
1350 },
1351 },
1352 DiagnosticEntry {
1353 range: Point::new(4, 9)..Point::new(4, 12),
1354 diagnostic: Diagnostic {
1355 source: Some("disk".into()),
1356 severity: DiagnosticSeverity::ERROR,
1357 message: "undefined variable 'CCC'".to_string(),
1358 is_disk_based: true,
1359 group_id: 2,
1360 is_primary: true,
1361 ..Default::default()
1362 }
1363 }
1364 ]
1365 );
1366 assert_eq!(
1367 chunks_with_diagnostics(buffer, 0..buffer.len()),
1368 [
1369 ("\n\nfn a() { ".to_string(), None),
1370 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1371 (" }\nfn b() { ".to_string(), None),
1372 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1373 (" }\nfn c() { ".to_string(), None),
1374 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1375 (" }\n".to_string(), None),
1376 ]
1377 );
1378 assert_eq!(
1379 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1380 [
1381 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1382 (" }\nfn c() { ".to_string(), None),
1383 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1384 ]
1385 );
1386 });
1387
1388 // Ensure overlapping diagnostics are highlighted correctly.
1389 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1390 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1391 version: Some(open_notification.text_document.version),
1392 diagnostics: vec![
1393 lsp::Diagnostic {
1394 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1395 severity: Some(DiagnosticSeverity::ERROR),
1396 message: "undefined variable 'A'".to_string(),
1397 source: Some("disk".to_string()),
1398 ..Default::default()
1399 },
1400 lsp::Diagnostic {
1401 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1402 severity: Some(DiagnosticSeverity::WARNING),
1403 message: "unreachable statement".to_string(),
1404 source: Some("disk".to_string()),
1405 ..Default::default()
1406 },
1407 ],
1408 });
1409
1410 buffer.next_notification(cx).await;
1411 cx.foreground().run_until_parked();
1412 buffer.read_with(cx, |buffer, _| {
1413 assert_eq!(
1414 buffer
1415 .snapshot()
1416 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1417 .collect::<Vec<_>>(),
1418 &[
1419 DiagnosticEntry {
1420 range: Point::new(2, 9)..Point::new(2, 12),
1421 diagnostic: Diagnostic {
1422 source: Some("disk".into()),
1423 severity: DiagnosticSeverity::WARNING,
1424 message: "unreachable statement".to_string(),
1425 is_disk_based: true,
1426 group_id: 4,
1427 is_primary: true,
1428 ..Default::default()
1429 }
1430 },
1431 DiagnosticEntry {
1432 range: Point::new(2, 9)..Point::new(2, 10),
1433 diagnostic: Diagnostic {
1434 source: Some("disk".into()),
1435 severity: DiagnosticSeverity::ERROR,
1436 message: "undefined variable 'A'".to_string(),
1437 is_disk_based: true,
1438 group_id: 3,
1439 is_primary: true,
1440 ..Default::default()
1441 },
1442 }
1443 ]
1444 );
1445 assert_eq!(
1446 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1447 [
1448 ("fn a() { ".to_string(), None),
1449 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1450 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1451 ("\n".to_string(), None),
1452 ]
1453 );
1454 assert_eq!(
1455 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1456 [
1457 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1458 ("\n".to_string(), None),
1459 ]
1460 );
1461 });
1462
1463 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1464 // changes since the last save.
1465 buffer.update(cx, |buffer, cx| {
1466 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1467 buffer.edit(
1468 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1469 None,
1470 cx,
1471 );
1472 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1473 });
1474 let change_notification_2 = fake_server
1475 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1476 .await;
1477 assert!(
1478 change_notification_2.text_document.version > change_notification_1.text_document.version
1479 );
1480
1481 // Handle out-of-order diagnostics
1482 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1483 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1484 version: Some(change_notification_2.text_document.version),
1485 diagnostics: vec![
1486 lsp::Diagnostic {
1487 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1488 severity: Some(DiagnosticSeverity::ERROR),
1489 message: "undefined variable 'BB'".to_string(),
1490 source: Some("disk".to_string()),
1491 ..Default::default()
1492 },
1493 lsp::Diagnostic {
1494 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1495 severity: Some(DiagnosticSeverity::WARNING),
1496 message: "undefined variable 'A'".to_string(),
1497 source: Some("disk".to_string()),
1498 ..Default::default()
1499 },
1500 ],
1501 });
1502
1503 buffer.next_notification(cx).await;
1504 cx.foreground().run_until_parked();
1505 buffer.read_with(cx, |buffer, _| {
1506 assert_eq!(
1507 buffer
1508 .snapshot()
1509 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1510 .collect::<Vec<_>>(),
1511 &[
1512 DiagnosticEntry {
1513 range: Point::new(2, 21)..Point::new(2, 22),
1514 diagnostic: Diagnostic {
1515 source: Some("disk".into()),
1516 severity: DiagnosticSeverity::WARNING,
1517 message: "undefined variable 'A'".to_string(),
1518 is_disk_based: true,
1519 group_id: 6,
1520 is_primary: true,
1521 ..Default::default()
1522 }
1523 },
1524 DiagnosticEntry {
1525 range: Point::new(3, 9)..Point::new(3, 14),
1526 diagnostic: Diagnostic {
1527 source: Some("disk".into()),
1528 severity: DiagnosticSeverity::ERROR,
1529 message: "undefined variable 'BB'".to_string(),
1530 is_disk_based: true,
1531 group_id: 5,
1532 is_primary: true,
1533 ..Default::default()
1534 },
1535 }
1536 ]
1537 );
1538 });
1539}
1540
1541#[gpui::test]
1542async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1543 init_test(cx);
1544
1545 let text = concat!(
1546 "let one = ;\n", //
1547 "let two = \n",
1548 "let three = 3;\n",
1549 );
1550
1551 let fs = FakeFs::new(cx.background());
1552 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1553
1554 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1555 let buffer = project
1556 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1557 .await
1558 .unwrap();
1559
1560 project.update(cx, |project, cx| {
1561 project
1562 .update_buffer_diagnostics(
1563 &buffer,
1564 LanguageServerId(0),
1565 None,
1566 vec![
1567 DiagnosticEntry {
1568 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1569 diagnostic: Diagnostic {
1570 severity: DiagnosticSeverity::ERROR,
1571 message: "syntax error 1".to_string(),
1572 ..Default::default()
1573 },
1574 },
1575 DiagnosticEntry {
1576 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1577 diagnostic: Diagnostic {
1578 severity: DiagnosticSeverity::ERROR,
1579 message: "syntax error 2".to_string(),
1580 ..Default::default()
1581 },
1582 },
1583 ],
1584 cx,
1585 )
1586 .unwrap();
1587 });
1588
1589 // An empty range is extended forward to include the following character.
1590 // At the end of a line, an empty range is extended backward to include
1591 // the preceding character.
1592 buffer.read_with(cx, |buffer, _| {
1593 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1594 assert_eq!(
1595 chunks
1596 .iter()
1597 .map(|(s, d)| (s.as_str(), *d))
1598 .collect::<Vec<_>>(),
1599 &[
1600 ("let one = ", None),
1601 (";", Some(DiagnosticSeverity::ERROR)),
1602 ("\nlet two =", None),
1603 (" ", Some(DiagnosticSeverity::ERROR)),
1604 ("\nlet three = 3;\n", None)
1605 ]
1606 );
1607 });
1608}
1609
1610#[gpui::test]
1611async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1612 init_test(cx);
1613
1614 let fs = FakeFs::new(cx.background());
1615 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1616 .await;
1617
1618 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1619
1620 project.update(cx, |project, cx| {
1621 project
1622 .update_diagnostic_entries(
1623 LanguageServerId(0),
1624 Path::new("/dir/a.rs").to_owned(),
1625 None,
1626 vec![DiagnosticEntry {
1627 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1628 diagnostic: Diagnostic {
1629 severity: DiagnosticSeverity::ERROR,
1630 is_primary: true,
1631 message: "syntax error a1".to_string(),
1632 ..Default::default()
1633 },
1634 }],
1635 cx,
1636 )
1637 .unwrap();
1638 project
1639 .update_diagnostic_entries(
1640 LanguageServerId(1),
1641 Path::new("/dir/a.rs").to_owned(),
1642 None,
1643 vec![DiagnosticEntry {
1644 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1645 diagnostic: Diagnostic {
1646 severity: DiagnosticSeverity::ERROR,
1647 is_primary: true,
1648 message: "syntax error b1".to_string(),
1649 ..Default::default()
1650 },
1651 }],
1652 cx,
1653 )
1654 .unwrap();
1655
1656 assert_eq!(
1657 project.diagnostic_summary(cx),
1658 DiagnosticSummary {
1659 error_count: 2,
1660 warning_count: 0,
1661 }
1662 );
1663 });
1664}
1665
1666#[gpui::test]
1667async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1668 init_test(cx);
1669
1670 let mut language = Language::new(
1671 LanguageConfig {
1672 name: "Rust".into(),
1673 path_suffixes: vec!["rs".to_string()],
1674 ..Default::default()
1675 },
1676 Some(tree_sitter_rust::language()),
1677 );
1678 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1679
1680 let text = "
1681 fn a() {
1682 f1();
1683 }
1684 fn b() {
1685 f2();
1686 }
1687 fn c() {
1688 f3();
1689 }
1690 "
1691 .unindent();
1692
1693 let fs = FakeFs::new(cx.background());
1694 fs.insert_tree(
1695 "/dir",
1696 json!({
1697 "a.rs": text.clone(),
1698 }),
1699 )
1700 .await;
1701
1702 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1703 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1704 let buffer = project
1705 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1706 .await
1707 .unwrap();
1708
1709 let mut fake_server = fake_servers.next().await.unwrap();
1710 let lsp_document_version = fake_server
1711 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1712 .await
1713 .text_document
1714 .version;
1715
1716 // Simulate editing the buffer after the language server computes some edits.
1717 buffer.update(cx, |buffer, cx| {
1718 buffer.edit(
1719 [(
1720 Point::new(0, 0)..Point::new(0, 0),
1721 "// above first function\n",
1722 )],
1723 None,
1724 cx,
1725 );
1726 buffer.edit(
1727 [(
1728 Point::new(2, 0)..Point::new(2, 0),
1729 " // inside first function\n",
1730 )],
1731 None,
1732 cx,
1733 );
1734 buffer.edit(
1735 [(
1736 Point::new(6, 4)..Point::new(6, 4),
1737 "// inside second function ",
1738 )],
1739 None,
1740 cx,
1741 );
1742
1743 assert_eq!(
1744 buffer.text(),
1745 "
1746 // above first function
1747 fn a() {
1748 // inside first function
1749 f1();
1750 }
1751 fn b() {
1752 // inside second function f2();
1753 }
1754 fn c() {
1755 f3();
1756 }
1757 "
1758 .unindent()
1759 );
1760 });
1761
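    // The edits below are expressed against `lsp_document_version`, i.e. the buffer as the
    // server last saw it, and must be adjusted for the edits made above.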
1762 let edits = project
1763 .update(cx, |project, cx| {
1764 project.edits_from_lsp(
1765 &buffer,
1766 vec![
1767 // replace body of first function
1768 lsp::TextEdit {
1769 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1770 new_text: "
1771 fn a() {
1772 f10();
1773 }
1774 "
1775 .unindent(),
1776 },
1777 // edit inside second function
1778 lsp::TextEdit {
1779 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1780 new_text: "00".into(),
1781 },
1782 // edit inside third function via two distinct edits
1783 lsp::TextEdit {
1784 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1785 new_text: "4000".into(),
1786 },
1787 lsp::TextEdit {
1788 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1789 new_text: "".into(),
1790 },
1791 ],
1792 LanguageServerId(0),
1793 Some(lsp_document_version),
1794 cx,
1795 )
1796 })
1797 .await
1798 .unwrap();
1799
1800 buffer.update(cx, |buffer, cx| {
1801 for (range, new_text) in edits {
1802 buffer.edit([(range, new_text)], None, cx);
1803 }
1804 assert_eq!(
1805 buffer.text(),
1806 "
1807 // above first function
1808 fn a() {
1809 // inside first function
1810 f10();
1811 }
1812 fn b() {
1813 // inside second function f200();
1814 }
1815 fn c() {
1816 f4000();
1817 }
1818 "
1819 .unindent()
1820 );
1821 });
1822}
1823
1824#[gpui::test]
1825async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1826 init_test(cx);
1827
1828 let text = "
1829 use a::b;
1830 use a::c;
1831
1832 fn f() {
1833 b();
1834 c();
1835 }
1836 "
1837 .unindent();
1838
1839 let fs = FakeFs::new(cx.background());
1840 fs.insert_tree(
1841 "/dir",
1842 json!({
1843 "a.rs": text.clone(),
1844 }),
1845 )
1846 .await;
1847
1848 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1849 let buffer = project
1850 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1851 .await
1852 .unwrap();
1853
1854 // Simulate the language server sending us a small edit in the form of a very large diff.
1855 // Rust-analyzer does this when performing a merge-imports code action.
1856 let edits = project
1857 .update(cx, |project, cx| {
1858 project.edits_from_lsp(
1859 &buffer,
1860 [
1861 // Replace the first use statement without editing the semicolon.
1862 lsp::TextEdit {
1863 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1864 new_text: "a::{b, c}".into(),
1865 },
1866 // Reinsert the remainder of the file between the semicolon and the final
1867 // newline of the file.
1868 lsp::TextEdit {
1869 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1870 new_text: "\n\n".into(),
1871 },
1872 lsp::TextEdit {
1873 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1874 new_text: "
1875 fn f() {
1876 b();
1877 c();
1878 }"
1879 .unindent(),
1880 },
1881 // Delete everything after the first newline of the file.
1882 lsp::TextEdit {
1883 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1884 new_text: "".into(),
1885 },
1886 ],
1887 LanguageServerId(0),
1888 None,
1889 cx,
1890 )
1891 })
1892 .await
1893 .unwrap();
1894
1895 buffer.update(cx, |buffer, cx| {
1896 let edits = edits
1897 .into_iter()
1898 .map(|(range, text)| {
1899 (
1900 range.start.to_point(buffer)..range.end.to_point(buffer),
1901 text,
1902 )
1903 })
1904 .collect::<Vec<_>>();
1905
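        // The large diff is minimized to just the parts that actually changed: the rewritten
        // import and the removal of the now-redundant second `use` line.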
1906 assert_eq!(
1907 edits,
1908 [
1909 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1910 (Point::new(1, 0)..Point::new(2, 0), "".into())
1911 ]
1912 );
1913
1914 for (range, new_text) in edits {
1915 buffer.edit([(range, new_text)], None, cx);
1916 }
1917 assert_eq!(
1918 buffer.text(),
1919 "
1920 use a::{b, c};
1921
1922 fn f() {
1923 b();
1924 c();
1925 }
1926 "
1927 .unindent()
1928 );
1929 });
1930}
1931
1932#[gpui::test]
1933async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1934 init_test(cx);
1935
1936 let text = "
1937 use a::b;
1938 use a::c;
1939
1940 fn f() {
1941 b();
1942 c();
1943 }
1944 "
1945 .unindent();
1946
1947 let fs = FakeFs::new(cx.background());
1948 fs.insert_tree(
1949 "/dir",
1950 json!({
1951 "a.rs": text.clone(),
1952 }),
1953 )
1954 .await;
1955
1956 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1957 let buffer = project
1958 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1959 .await
1960 .unwrap();
1961
1962 // Simulate the language server sending us edits in a non-ordered fashion,
1963 // with ranges sometimes being inverted or pointing to invalid locations.
1964 let edits = project
1965 .update(cx, |project, cx| {
1966 project.edits_from_lsp(
1967 &buffer,
1968 [
1969 lsp::TextEdit {
1970 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1971 new_text: "\n\n".into(),
1972 },
1973 lsp::TextEdit {
1974 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1975 new_text: "a::{b, c}".into(),
1976 },
1977 lsp::TextEdit {
1978 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1979 new_text: "".into(),
1980 },
1981 lsp::TextEdit {
1982 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1983 new_text: "
1984 fn f() {
1985 b();
1986 c();
1987 }"
1988 .unindent(),
1989 },
1990 ],
1991 LanguageServerId(0),
1992 None,
1993 cx,
1994 )
1995 })
1996 .await
1997 .unwrap();
1998
1999 buffer.update(cx, |buffer, cx| {
2000 let edits = edits
2001 .into_iter()
2002 .map(|(range, text)| {
2003 (
2004 range.start.to_point(buffer)..range.end.to_point(buffer),
2005 text,
2006 )
2007 })
2008 .collect::<Vec<_>>();
2009
2010 assert_eq!(
2011 edits,
2012 [
2013 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2014 (Point::new(1, 0)..Point::new(2, 0), "".into())
2015 ]
2016 );
2017
2018 for (range, new_text) in edits {
2019 buffer.edit([(range, new_text)], None, cx);
2020 }
2021 assert_eq!(
2022 buffer.text(),
2023 "
2024 use a::{b, c};
2025
2026 fn f() {
2027 b();
2028 c();
2029 }
2030 "
2031 .unindent()
2032 );
2033 });
2034}
2035
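// Collects the text of `range` as (text, severity) chunks, merging adjacent chunks that
// share the same diagnostic severity.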
2036fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2037 buffer: &Buffer,
2038 range: Range<T>,
2039) -> Vec<(String, Option<DiagnosticSeverity>)> {
2040 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2041 for chunk in buffer.snapshot().chunks(range, true) {
2042 if chunks.last().map_or(false, |prev_chunk| {
2043 prev_chunk.1 == chunk.diagnostic_severity
2044 }) {
2045 chunks.last_mut().unwrap().0.push_str(chunk.text);
2046 } else {
2047 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2048 }
2049 }
2050 chunks
2051}
2052
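// Go-to-definition should resolve into the target file via a new, non-visible
// worktree, without starting an additional language server.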
2053#[gpui::test(iterations = 10)]
2054async fn test_definition(cx: &mut gpui::TestAppContext) {
2055 init_test(cx);
2056
2057 let mut language = Language::new(
2058 LanguageConfig {
2059 name: "Rust".into(),
2060 path_suffixes: vec!["rs".to_string()],
2061 ..Default::default()
2062 },
2063 Some(tree_sitter_rust::language()),
2064 );
2065 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2066
2067 let fs = FakeFs::new(cx.background());
2068 fs.insert_tree(
2069 "/dir",
2070 json!({
2071 "a.rs": "const fn a() { A }",
2072 "b.rs": "const y: i32 = crate::a()",
2073 }),
2074 )
2075 .await;
2076
2077 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2078 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2079
2080 let buffer = project
2081 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2082 .await
2083 .unwrap();
2084
2085 let fake_server = fake_servers.next().await.unwrap();
2086 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2087 let params = params.text_document_position_params;
2088 assert_eq!(
2089 params.text_document.uri.to_file_path().unwrap(),
2090 Path::new("/dir/b.rs"),
2091 );
2092 assert_eq!(params.position, lsp::Position::new(0, 22));
2093
2094 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2095 lsp::Location::new(
2096 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2097 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2098 ),
2099 )))
2100 });
2101
2102 let mut definitions = project
2103 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2104 .await
2105 .unwrap();
2106
2107 // Assert that no additional language server was started for this request.
2108 cx.foreground().run_until_parked();
2109 assert!(fake_servers.try_next().is_err());
2110
2111 assert_eq!(definitions.len(), 1);
2112 let definition = definitions.pop().unwrap();
2113 cx.update(|cx| {
2114 let target_buffer = definition.target.buffer.read(cx);
2115 assert_eq!(
2116 target_buffer
2117 .file()
2118 .unwrap()
2119 .as_local()
2120 .unwrap()
2121 .abs_path(cx),
2122 Path::new("/dir/a.rs"),
2123 );
2124 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2125 assert_eq!(
2126 list_worktrees(&project, cx),
2127 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2128 );
2129
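// Once the definition (and with it the target buffer) is dropped, the extra
// non-visible worktree should be released as well, as checked below.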
2130 drop(definition);
2131 });
2132 cx.read(|cx| {
2133 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2134 });
2135
2136 fn list_worktrees<'a>(
2137 project: &'a ModelHandle<Project>,
2138 cx: &'a AppContext,
2139 ) -> Vec<(&'a Path, bool)> {
2140 project
2141 .read(cx)
2142 .worktrees(cx)
2143 .map(|worktree| {
2144 let worktree = worktree.read(cx);
2145 (
2146 worktree.as_local().unwrap().abs_path().as_ref(),
2147 worktree.is_visible(),
2148 )
2149 })
2150 .collect::<Vec<_>>()
2151 }
2152}
2153
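// Completion items that arrive without an explicit edit range should have their
// replacement range inferred from the text around the cursor.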
2154#[gpui::test]
2155async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2156 init_test(cx);
2157
2158 let mut language = Language::new(
2159 LanguageConfig {
2160 name: "TypeScript".into(),
2161 path_suffixes: vec!["ts".to_string()],
2162 ..Default::default()
2163 },
2164 Some(tree_sitter_typescript::language_typescript()),
2165 );
2166 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2167
2168 let fs = FakeFs::new(cx.background());
2169 fs.insert_tree(
2170 "/dir",
2171 json!({
2172 "a.ts": "",
2173 }),
2174 )
2175 .await;
2176
2177 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2178 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2179 let buffer = project
2180 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2181 .await
2182 .unwrap();
2183
2184 let fake_server = fake_language_servers.next().await.unwrap();
2185
2186 let text = "let a = b.fqn";
2187 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2188 let completions = project.update(cx, |project, cx| {
2189 project.completions(&buffer, text.len(), cx)
2190 });
2191
2192 fake_server
2193 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2194 Ok(Some(lsp::CompletionResponse::Array(vec![
2195 lsp::CompletionItem {
2196 label: "fullyQualifiedName?".into(),
2197 insert_text: Some("fullyQualifiedName".into()),
2198 ..Default::default()
2199 },
2200 ])))
2201 })
2202 .next()
2203 .await;
2204 let completions = completions.await.unwrap();
2205 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2206 assert_eq!(completions.len(), 1);
2207 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2208 assert_eq!(
2209 completions[0].old_range.to_offset(&snapshot),
2210 text.len() - 3..text.len()
2211 );
2212
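// When completing inside a string literal, the inferred range should cover only
// the token immediately before the cursor ("cmp"), not the entire string.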
2213 let text = "let a = \"atoms/cmp\"";
2214 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2215 let completions = project.update(cx, |project, cx| {
2216 project.completions(&buffer, text.len() - 1, cx)
2217 });
2218
2219 fake_server
2220 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2221 Ok(Some(lsp::CompletionResponse::Array(vec![
2222 lsp::CompletionItem {
2223 label: "component".into(),
2224 ..Default::default()
2225 },
2226 ])))
2227 })
2228 .next()
2229 .await;
2230 let completions = completions.await.unwrap();
2231 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2232 assert_eq!(completions.len(), 1);
2233 assert_eq!(completions[0].new_text, "component");
2234 assert_eq!(
2235 completions[0].old_range.to_offset(&snapshot),
2236 text.len() - 4..text.len() - 1
2237 );
2238}
2239
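// Carriage returns in a completion's insertion text should be normalized to plain newlines.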
2240#[gpui::test]
2241async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2242 init_test(cx);
2243
2244 let mut language = Language::new(
2245 LanguageConfig {
2246 name: "TypeScript".into(),
2247 path_suffixes: vec!["ts".to_string()],
2248 ..Default::default()
2249 },
2250 Some(tree_sitter_typescript::language_typescript()),
2251 );
2252 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2253
2254 let fs = FakeFs::new(cx.background());
2255 fs.insert_tree(
2256 "/dir",
2257 json!({
2258 "a.ts": "",
2259 }),
2260 )
2261 .await;
2262
2263 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2264 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2265 let buffer = project
2266 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2267 .await
2268 .unwrap();
2269
2270 let fake_server = fake_language_servers.next().await.unwrap();
2271
2272 let text = "let a = b.fqn";
2273 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2274 let completions = project.update(cx, |project, cx| {
2275 project.completions(&buffer, text.len(), cx)
2276 });
2277
2278 fake_server
2279 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2280 Ok(Some(lsp::CompletionResponse::Array(vec![
2281 lsp::CompletionItem {
2282 label: "fullyQualifiedName?".into(),
2283 insert_text: Some("fully\rQualified\r\nName".into()),
2284 ..Default::default()
2285 },
2286 ])))
2287 })
2288 .next()
2289 .await;
2290 let completions = completions.await.unwrap();
2291 assert_eq!(completions.len(), 1);
2292 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2293}
2294
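// Code actions that carry a command instead of edits are applied by executing the
// command; any edits the server requests via `workspace/applyEdit` while the command
// runs end up in the returned project transaction.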
2295#[gpui::test(iterations = 10)]
2296async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2297 init_test(cx);
2298
2299 let mut language = Language::new(
2300 LanguageConfig {
2301 name: "TypeScript".into(),
2302 path_suffixes: vec!["ts".to_string()],
2303 ..Default::default()
2304 },
2305 None,
2306 );
2307 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2308
2309 let fs = FakeFs::new(cx.background());
2310 fs.insert_tree(
2311 "/dir",
2312 json!({
2313 "a.ts": "a",
2314 }),
2315 )
2316 .await;
2317
2318 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2319 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2320 let buffer = project
2321 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2322 .await
2323 .unwrap();
2324
2325 let fake_server = fake_language_servers.next().await.unwrap();
2326
2327 // The language server returns code actions that contain commands rather than edits.
2328 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2329 fake_server
2330 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2331 Ok(Some(vec![
2332 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2333 title: "The code action".into(),
2334 command: Some(lsp::Command {
2335 title: "The command".into(),
2336 command: "_the/command".into(),
2337 arguments: Some(vec![json!("the-argument")]),
2338 }),
2339 ..Default::default()
2340 }),
2341 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2342 title: "two".into(),
2343 ..Default::default()
2344 }),
2345 ]))
2346 })
2347 .next()
2348 .await;
2349
2350 let action = actions.await.unwrap()[0].clone();
2351 let apply = project.update(cx, |project, cx| {
2352 project.apply_code_action(buffer.clone(), action, true, cx)
2353 });
2354
2355 // Resolving the code action does not populate its edits. In the absence of
2356 // edits, we must execute the action's command instead.
2357 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2358 |action, _| async move { Ok(action) },
2359 );
2360
2361 // While executing the command, the language server sends the editor
2362 // a `workspace/applyEdit` request.
2363 fake_server
2364 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2365 let fake = fake_server.clone();
2366 move |params, _| {
2367 assert_eq!(params.command, "_the/command");
2368 let fake = fake.clone();
2369 async move {
2370 fake.server
2371 .request::<lsp::request::ApplyWorkspaceEdit>(
2372 lsp::ApplyWorkspaceEditParams {
2373 label: None,
2374 edit: lsp::WorkspaceEdit {
2375 changes: Some(
2376 [(
2377 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2378 vec![lsp::TextEdit {
2379 range: lsp::Range::new(
2380 lsp::Position::new(0, 0),
2381 lsp::Position::new(0, 0),
2382 ),
2383 new_text: "X".into(),
2384 }],
2385 )]
2386 .into_iter()
2387 .collect(),
2388 ),
2389 ..Default::default()
2390 },
2391 },
2392 )
2393 .await
2394 .unwrap();
2395 Ok(Some(json!(null)))
2396 }
2397 }
2398 })
2399 .next()
2400 .await;
2401
2402 // Applying the code action returns a project transaction containing the edits
2403 // sent by the language server in its `workspace/applyEdit` request.
2404 let transaction = apply.await.unwrap();
2405 assert!(transaction.0.contains_key(&buffer));
2406 buffer.update(cx, |buffer, cx| {
2407 assert_eq!(buffer.text(), "Xa");
2408 buffer.undo(cx);
2409 assert_eq!(buffer.text(), "a");
2410 });
2411}
2412
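// Saving a modified buffer should write its current contents back to the file on disk.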
2413#[gpui::test(iterations = 10)]
2414async fn test_save_file(cx: &mut gpui::TestAppContext) {
2415 init_test(cx);
2416
2417 let fs = FakeFs::new(cx.background());
2418 fs.insert_tree(
2419 "/dir",
2420 json!({
2421 "file1": "the old contents",
2422 }),
2423 )
2424 .await;
2425
2426 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2427 let buffer = project
2428 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2429 .await
2430 .unwrap();
2431 buffer.update(cx, |buffer, cx| {
2432 assert_eq!(buffer.text(), "the old contents");
2433 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2434 });
2435
2436 project
2437 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2438 .await
2439 .unwrap();
2440
2441 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2442 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2443}
2444
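// Saving should also work when the worktree root is a single file rather than a directory.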
2445#[gpui::test]
2446async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2447 init_test(cx);
2448
2449 let fs = FakeFs::new(cx.background());
2450 fs.insert_tree(
2451 "/dir",
2452 json!({
2453 "file1": "the old contents",
2454 }),
2455 )
2456 .await;
2457
2458 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2459 let buffer = project
2460 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2461 .await
2462 .unwrap();
2463 buffer.update(cx, |buffer, cx| {
2464 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2465 });
2466
2467 project
2468 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2469 .await
2470 .unwrap();
2471
2472 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2473 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2474}
2475
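// Saving an untitled buffer to a new path should create the file, clear the buffer's
// dirty state, assign a language based on the path's extension, and let subsequent
// opens of that path return the same buffer.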
2476#[gpui::test]
2477async fn test_save_as(cx: &mut gpui::TestAppContext) {
2478 init_test(cx);
2479
2480 let fs = FakeFs::new(cx.background());
2481 fs.insert_tree("/dir", json!({})).await;
2482
2483 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2484
2485 let languages = project.read_with(cx, |project, _| project.languages().clone());
2486 languages.register(
2487 "/some/path",
2488 LanguageConfig {
2489 name: "Rust".into(),
2490 path_suffixes: vec!["rs".into()],
2491 ..Default::default()
2492 },
2493 tree_sitter_rust::language(),
2494 vec![],
2495 |_| Default::default(),
2496 );
2497
2498 let buffer = project.update(cx, |project, cx| {
2499 project.create_buffer("", None, cx).unwrap()
2500 });
2501 buffer.update(cx, |buffer, cx| {
2502 buffer.edit([(0..0, "abc")], None, cx);
2503 assert!(buffer.is_dirty());
2504 assert!(!buffer.has_conflict());
2505 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2506 });
2507 project
2508 .update(cx, |project, cx| {
2509 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2510 })
2511 .await
2512 .unwrap();
2513 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2514
2515 cx.foreground().run_until_parked();
2516 buffer.read_with(cx, |buffer, cx| {
2517 assert_eq!(
2518 buffer.file().unwrap().full_path(cx),
2519 Path::new("dir/file1.rs")
2520 );
2521 assert!(!buffer.is_dirty());
2522 assert!(!buffer.has_conflict());
2523 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2524 });
2525
2526 let opened_buffer = project
2527 .update(cx, |project, cx| {
2528 project.open_local_buffer("/dir/file1.rs", cx)
2529 })
2530 .await
2531 .unwrap();
2532 assert_eq!(opened_buffer, buffer);
2533}
2534
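// File system renames and deletions should be reflected in the local worktree, keep
// open buffers pointing at the right paths, and be mirrored into a remote copy of the
// worktree via its update stream.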
2535#[gpui::test(retries = 5)]
2536async fn test_rescan_and_remote_updates(
2537 deterministic: Arc<Deterministic>,
2538 cx: &mut gpui::TestAppContext,
2539) {
2540 init_test(cx);
2541 cx.foreground().allow_parking();
2542
2543 let dir = temp_tree(json!({
2544 "a": {
2545 "file1": "",
2546 "file2": "",
2547 "file3": "",
2548 },
2549 "b": {
2550 "c": {
2551 "file4": "",
2552 "file5": "",
2553 }
2554 }
2555 }));
2556
2557 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2558 let rpc = project.read_with(cx, |p, _| p.client.clone());
2559
2560 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2561 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2562 async move { buffer.await.unwrap() }
2563 };
2564 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2565 project.read_with(cx, |project, cx| {
2566 let tree = project.worktrees(cx).next().unwrap();
2567 tree.read(cx)
2568 .entry_for_path(path)
2569 .unwrap_or_else(|| panic!("no entry for path {}", path))
2570 .id
2571 })
2572 };
2573
2574 let buffer2 = buffer_for_path("a/file2", cx).await;
2575 let buffer3 = buffer_for_path("a/file3", cx).await;
2576 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2577 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2578
2579 let file2_id = id_for_path("a/file2", cx);
2580 let file3_id = id_for_path("a/file3", cx);
2581 let file4_id = id_for_path("b/c/file4", cx);
2582
2583 // Create a remote copy of this worktree.
2584 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2585
2586 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2587
2588 let updates = Arc::new(Mutex::new(Vec::new()));
2589 tree.update(cx, |tree, cx| {
2590 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2591 let updates = updates.clone();
2592 move |update| {
2593 updates.lock().push(update);
2594 async { true }
2595 }
2596 });
2597 });
2598
2599 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2600 deterministic.run_until_parked();
2601
2602 cx.read(|cx| {
2603 assert!(!buffer2.read(cx).is_dirty());
2604 assert!(!buffer3.read(cx).is_dirty());
2605 assert!(!buffer4.read(cx).is_dirty());
2606 assert!(!buffer5.read(cx).is_dirty());
2607 });
2608
2609 // Rename and delete files and directories.
2610 tree.flush_fs_events(cx).await;
2611 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2612 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2613 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2614 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2615 tree.flush_fs_events(cx).await;
2616
2617 let expected_paths = vec![
2618 "a",
2619 "a/file1",
2620 "a/file2.new",
2621 "b",
2622 "d",
2623 "d/file3",
2624 "d/file4",
2625 ];
2626
2627 cx.read(|app| {
2628 assert_eq!(
2629 tree.read(app)
2630 .paths()
2631 .map(|p| p.to_str().unwrap())
2632 .collect::<Vec<_>>(),
2633 expected_paths
2634 );
2635
2636 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2637 assert_eq!(id_for_path("d/file3", cx), file3_id);
2638 assert_eq!(id_for_path("d/file4", cx), file4_id);
2639
2640 assert_eq!(
2641 buffer2.read(app).file().unwrap().path().as_ref(),
2642 Path::new("a/file2.new")
2643 );
2644 assert_eq!(
2645 buffer3.read(app).file().unwrap().path().as_ref(),
2646 Path::new("d/file3")
2647 );
2648 assert_eq!(
2649 buffer4.read(app).file().unwrap().path().as_ref(),
2650 Path::new("d/file4")
2651 );
2652 assert_eq!(
2653 buffer5.read(app).file().unwrap().path().as_ref(),
2654 Path::new("b/c/file5")
2655 );
2656
2657 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2658 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2659 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2660 assert!(buffer5.read(app).file().unwrap().is_deleted());
2661 });
2662
2663 // Update the remote worktree. Check that it becomes consistent with the
2664 // local worktree.
2665 deterministic.run_until_parked();
2666 remote.update(cx, |remote, _| {
2667 for update in updates.lock().drain(..) {
2668 remote.as_remote_mut().unwrap().update_from_remote(update);
2669 }
2670 });
2671 deterministic.run_until_parked();
2672 remote.read_with(cx, |remote, _| {
2673 assert_eq!(
2674 remote
2675 .paths()
2676 .map(|p| p.to_str().unwrap())
2677 .collect::<Vec<_>>(),
2678 expected_paths
2679 );
2680 });
2681}
2682
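// Renaming a directory should preserve the ids of its entries, so buffers opened from
// inside it keep referring to the same underlying files.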
2683#[gpui::test(iterations = 10)]
2684async fn test_buffer_identity_across_renames(
2685 deterministic: Arc<Deterministic>,
2686 cx: &mut gpui::TestAppContext,
2687) {
2688 init_test(cx);
2689
2690 let fs = FakeFs::new(cx.background());
2691 fs.insert_tree(
2692 "/dir",
2693 json!({
2694 "a": {
2695 "file1": "",
2696 }
2697 }),
2698 )
2699 .await;
2700
2701 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2702 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2703 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2704
2705 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2706 project.read_with(cx, |project, cx| {
2707 let tree = project.worktrees(cx).next().unwrap();
2708 tree.read(cx)
2709 .entry_for_path(path)
2710 .unwrap_or_else(|| panic!("no entry for path {}", path))
2711 .id
2712 })
2713 };
2714
2715 let dir_id = id_for_path("a", cx);
2716 let file_id = id_for_path("a/file1", cx);
2717 let buffer = project
2718 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2719 .await
2720 .unwrap();
2721 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2722
2723 project
2724 .update(cx, |project, cx| {
2725 project.rename_entry(dir_id, Path::new("b"), cx)
2726 })
2727 .unwrap()
2728 .await
2729 .unwrap();
2730 deterministic.run_until_parked();
2731 assert_eq!(id_for_path("b", cx), dir_id);
2732 assert_eq!(id_for_path("b/file1", cx), file_id);
2733 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2734}
2735
2736#[gpui::test]
2737async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2738 init_test(cx);
2739
2740 let fs = FakeFs::new(cx.background());
2741 fs.insert_tree(
2742 "/dir",
2743 json!({
2744 "a.txt": "a-contents",
2745 "b.txt": "b-contents",
2746 }),
2747 )
2748 .await;
2749
2750 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2751
2752 // Spawn multiple tasks to open paths, repeating some paths.
2753 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2754 (
2755 p.open_local_buffer("/dir/a.txt", cx),
2756 p.open_local_buffer("/dir/b.txt", cx),
2757 p.open_local_buffer("/dir/a.txt", cx),
2758 )
2759 });
2760
2761 let buffer_a_1 = buffer_a_1.await.unwrap();
2762 let buffer_a_2 = buffer_a_2.await.unwrap();
2763 let buffer_b = buffer_b.await.unwrap();
2764 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2765 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2766
2767 // There is only one buffer per path.
2768 let buffer_a_id = buffer_a_1.id();
2769 assert_eq!(buffer_a_2.id(), buffer_a_id);
2770
2771 // Open the same path again while it is still open.
2772 drop(buffer_a_1);
2773 let buffer_a_3 = project
2774 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2775 .await
2776 .unwrap();
2777
2778 // There's still only one buffer per path.
2779 assert_eq!(buffer_a_3.id(), buffer_a_id);
2780}
2781
2782#[gpui::test]
2783async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2784 init_test(cx);
2785
2786 let fs = FakeFs::new(cx.background());
2787 fs.insert_tree(
2788 "/dir",
2789 json!({
2790 "file1": "abc",
2791 "file2": "def",
2792 "file3": "ghi",
2793 }),
2794 )
2795 .await;
2796
2797 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2798
2799 let buffer1 = project
2800 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2801 .await
2802 .unwrap();
2803 let events = Rc::new(RefCell::new(Vec::new()));
2804
2805 // Initially, the buffer isn't dirty.
2806 buffer1.update(cx, |buffer, cx| {
2807 cx.subscribe(&buffer1, {
2808 let events = events.clone();
2809 move |_, _, event, _| match event {
2810 BufferEvent::Operation(_) => {}
2811 _ => events.borrow_mut().push(event.clone()),
2812 }
2813 })
2814 .detach();
2815
2816 assert!(!buffer.is_dirty());
2817 assert!(events.borrow().is_empty());
2818
2819 buffer.edit([(1..2, "")], None, cx);
2820 });
2821
2822 // After the first edit, the buffer is dirty and emits Edited and DirtyChanged events.
2823 buffer1.update(cx, |buffer, cx| {
2824 assert!(buffer.text() == "ac");
2825 assert!(buffer.is_dirty());
2826 assert_eq!(
2827 *events.borrow(),
2828 &[language::Event::Edited, language::Event::DirtyChanged]
2829 );
2830 events.borrow_mut().clear();
2831 buffer.did_save(
2832 buffer.version(),
2833 buffer.as_rope().fingerprint(),
2834 buffer.file().unwrap().mtime(),
2835 cx,
2836 );
2837 });
2838
2839 // After saving, the buffer is no longer dirty and emits a Saved event.
2840 buffer1.update(cx, |buffer, cx| {
2841 assert!(!buffer.is_dirty());
2842 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2843 events.borrow_mut().clear();
2844
2845 buffer.edit([(1..1, "B")], None, cx);
2846 buffer.edit([(2..2, "D")], None, cx);
2847 });
2848
2849 // After editing again, the buffer is dirty and emits another DirtyChanged event.
2850 buffer1.update(cx, |buffer, cx| {
2851 assert!(buffer.text() == "aBDc");
2852 assert!(buffer.is_dirty());
2853 assert_eq!(
2854 *events.borrow(),
2855 &[
2856 language::Event::Edited,
2857 language::Event::DirtyChanged,
2858 language::Event::Edited,
2859 ],
2860 );
2861 events.borrow_mut().clear();
2862
2863 // After restoring the buffer to its previously-saved state,
2864 // the buffer is not considered dirty anymore.
2865 buffer.edit([(1..3, "")], None, cx);
2866 assert!(buffer.text() == "ac");
2867 assert!(!buffer.is_dirty());
2868 });
2869
2870 assert_eq!(
2871 *events.borrow(),
2872 &[language::Event::Edited, language::Event::DirtyChanged]
2873 );
2874
2875 // When a file is deleted, the buffer is considered dirty.
2876 let events = Rc::new(RefCell::new(Vec::new()));
2877 let buffer2 = project
2878 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2879 .await
2880 .unwrap();
2881 buffer2.update(cx, |_, cx| {
2882 cx.subscribe(&buffer2, {
2883 let events = events.clone();
2884 move |_, _, event, _| events.borrow_mut().push(event.clone())
2885 })
2886 .detach();
2887 });
2888
2889 fs.remove_file("/dir/file2".as_ref(), Default::default())
2890 .await
2891 .unwrap();
2892 cx.foreground().run_until_parked();
2893 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2894 assert_eq!(
2895 *events.borrow(),
2896 &[
2897 language::Event::DirtyChanged,
2898 language::Event::FileHandleChanged
2899 ]
2900 );
2901
2902 // When a buffer is already dirty at the time its file is deleted, no DirtyChanged event is emitted.
2903 let events = Rc::new(RefCell::new(Vec::new()));
2904 let buffer3 = project
2905 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2906 .await
2907 .unwrap();
2908 buffer3.update(cx, |_, cx| {
2909 cx.subscribe(&buffer3, {
2910 let events = events.clone();
2911 move |_, _, event, _| events.borrow_mut().push(event.clone())
2912 })
2913 .detach();
2914 });
2915
2916 buffer3.update(cx, |buffer, cx| {
2917 buffer.edit([(0..0, "x")], None, cx);
2918 });
2919 events.borrow_mut().clear();
2920 fs.remove_file("/dir/file3".as_ref(), Default::default())
2921 .await
2922 .unwrap();
2923 cx.foreground().run_until_parked();
2924 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2925 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2926}
2927
2928#[gpui::test]
2929async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2930 init_test(cx);
2931
2932 let initial_contents = "aaa\nbbbbb\nc\n";
2933 let fs = FakeFs::new(cx.background());
2934 fs.insert_tree(
2935 "/dir",
2936 json!({
2937 "the-file": initial_contents,
2938 }),
2939 )
2940 .await;
2941 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2942 let buffer = project
2943 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2944 .await
2945 .unwrap();
2946
2947 let anchors = (0..3)
2948 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2949 .collect::<Vec<_>>();
2950
2951 // Change the file on disk, adding two new lines of text, and removing
2952 // one line.
2953 buffer.read_with(cx, |buffer, _| {
2954 assert!(!buffer.is_dirty());
2955 assert!(!buffer.has_conflict());
2956 });
2957 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2958 fs.save(
2959 "/dir/the-file".as_ref(),
2960 &new_contents.into(),
2961 LineEnding::Unix,
2962 )
2963 .await
2964 .unwrap();
2965
2966 // Because the buffer was not modified, it is reloaded from disk. Its
2967 // contents are edited according to the diff between the old and new
2968 // file contents.
2969 cx.foreground().run_until_parked();
2970 buffer.update(cx, |buffer, _| {
2971 assert_eq!(buffer.text(), new_contents);
2972 assert!(!buffer.is_dirty());
2973 assert!(!buffer.has_conflict());
2974
2975 let anchor_positions = anchors
2976 .iter()
2977 .map(|anchor| anchor.to_point(&*buffer))
2978 .collect::<Vec<_>>();
2979 assert_eq!(
2980 anchor_positions,
2981 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2982 );
2983 });
2984
2985 // Modify the buffer
2986 buffer.update(cx, |buffer, cx| {
2987 buffer.edit([(0..0, " ")], None, cx);
2988 assert!(buffer.is_dirty());
2989 assert!(!buffer.has_conflict());
2990 });
2991
2992 // Change the file on disk again, adding blank lines to the beginning.
2993 fs.save(
2994 "/dir/the-file".as_ref(),
2995 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2996 LineEnding::Unix,
2997 )
2998 .await
2999 .unwrap();
3000
3001 // Because the buffer is modified, it doesn't reload from disk, but is
3002 // marked as having a conflict.
3003 cx.foreground().run_until_parked();
3004 buffer.read_with(cx, |buffer, _| {
3005 assert!(buffer.has_conflict());
3006 });
3007}
3008
3009#[gpui::test]
3010async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3011 init_test(cx);
3012
3013 let fs = FakeFs::new(cx.background());
3014 fs.insert_tree(
3015 "/dir",
3016 json!({
3017 "file1": "a\nb\nc\n",
3018 "file2": "one\r\ntwo\r\nthree\r\n",
3019 }),
3020 )
3021 .await;
3022
3023 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3024 let buffer1 = project
3025 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3026 .await
3027 .unwrap();
3028 let buffer2 = project
3029 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3030 .await
3031 .unwrap();
3032
3033 buffer1.read_with(cx, |buffer, _| {
3034 assert_eq!(buffer.text(), "a\nb\nc\n");
3035 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3036 });
3037 buffer2.read_with(cx, |buffer, _| {
3038 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3039 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3040 });
3041
3042 // Change a file's line endings on disk from Unix to Windows. The buffer's
3043 // state updates correctly.
3044 fs.save(
3045 "/dir/file1".as_ref(),
3046 &"aaa\nb\nc\n".into(),
3047 LineEnding::Windows,
3048 )
3049 .await
3050 .unwrap();
3051 cx.foreground().run_until_parked();
3052 buffer1.read_with(cx, |buffer, _| {
3053 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3054 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3055 });
3056
3057 // Save a file with Windows line endings. The file is written correctly.
3058 buffer2.update(cx, |buffer, cx| {
3059 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3060 });
3061 project
3062 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3063 .await
3064 .unwrap();
3065 assert_eq!(
3066 fs.load("/dir/file2".as_ref()).await.unwrap(),
3067 "one\r\ntwo\r\nthree\r\nfour\r\n",
3068 );
3069}
3070
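// Diagnostics that reference each other through related information should be grouped:
// each primary diagnostic and its hints share a group id, and `diagnostic_group`
// returns all entries belonging to a group.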
3071#[gpui::test]
3072async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3073 init_test(cx);
3074
3075 let fs = FakeFs::new(cx.background());
3076 fs.insert_tree(
3077 "/the-dir",
3078 json!({
3079 "a.rs": "
3080 fn foo(mut v: Vec<usize>) {
3081 for x in &v {
3082 v.push(1);
3083 }
3084 }
3085 "
3086 .unindent(),
3087 }),
3088 )
3089 .await;
3090
3091 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3092 let buffer = project
3093 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3094 .await
3095 .unwrap();
3096
3097 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3098 let message = lsp::PublishDiagnosticsParams {
3099 uri: buffer_uri.clone(),
3100 diagnostics: vec![
3101 lsp::Diagnostic {
3102 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3103 severity: Some(DiagnosticSeverity::WARNING),
3104 message: "error 1".to_string(),
3105 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3106 location: lsp::Location {
3107 uri: buffer_uri.clone(),
3108 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3109 },
3110 message: "error 1 hint 1".to_string(),
3111 }]),
3112 ..Default::default()
3113 },
3114 lsp::Diagnostic {
3115 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3116 severity: Some(DiagnosticSeverity::HINT),
3117 message: "error 1 hint 1".to_string(),
3118 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3119 location: lsp::Location {
3120 uri: buffer_uri.clone(),
3121 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3122 },
3123 message: "original diagnostic".to_string(),
3124 }]),
3125 ..Default::default()
3126 },
3127 lsp::Diagnostic {
3128 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3129 severity: Some(DiagnosticSeverity::ERROR),
3130 message: "error 2".to_string(),
3131 related_information: Some(vec![
3132 lsp::DiagnosticRelatedInformation {
3133 location: lsp::Location {
3134 uri: buffer_uri.clone(),
3135 range: lsp::Range::new(
3136 lsp::Position::new(1, 13),
3137 lsp::Position::new(1, 15),
3138 ),
3139 },
3140 message: "error 2 hint 1".to_string(),
3141 },
3142 lsp::DiagnosticRelatedInformation {
3143 location: lsp::Location {
3144 uri: buffer_uri.clone(),
3145 range: lsp::Range::new(
3146 lsp::Position::new(1, 13),
3147 lsp::Position::new(1, 15),
3148 ),
3149 },
3150 message: "error 2 hint 2".to_string(),
3151 },
3152 ]),
3153 ..Default::default()
3154 },
3155 lsp::Diagnostic {
3156 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3157 severity: Some(DiagnosticSeverity::HINT),
3158 message: "error 2 hint 1".to_string(),
3159 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3160 location: lsp::Location {
3161 uri: buffer_uri.clone(),
3162 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3163 },
3164 message: "original diagnostic".to_string(),
3165 }]),
3166 ..Default::default()
3167 },
3168 lsp::Diagnostic {
3169 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3170 severity: Some(DiagnosticSeverity::HINT),
3171 message: "error 2 hint 2".to_string(),
3172 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3173 location: lsp::Location {
3174 uri: buffer_uri,
3175 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3176 },
3177 message: "original diagnostic".to_string(),
3178 }]),
3179 ..Default::default()
3180 },
3181 ],
3182 version: None,
3183 };
3184
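// Publish the diagnostics and check how they are grouped within the buffer.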
3185 project
3186 .update(cx, |p, cx| {
3187 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3188 })
3189 .unwrap();
3190 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3191
3192 assert_eq!(
3193 buffer
3194 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3195 .collect::<Vec<_>>(),
3196 &[
3197 DiagnosticEntry {
3198 range: Point::new(1, 8)..Point::new(1, 9),
3199 diagnostic: Diagnostic {
3200 severity: DiagnosticSeverity::WARNING,
3201 message: "error 1".to_string(),
3202 group_id: 1,
3203 is_primary: true,
3204 ..Default::default()
3205 }
3206 },
3207 DiagnosticEntry {
3208 range: Point::new(1, 8)..Point::new(1, 9),
3209 diagnostic: Diagnostic {
3210 severity: DiagnosticSeverity::HINT,
3211 message: "error 1 hint 1".to_string(),
3212 group_id: 1,
3213 is_primary: false,
3214 ..Default::default()
3215 }
3216 },
3217 DiagnosticEntry {
3218 range: Point::new(1, 13)..Point::new(1, 15),
3219 diagnostic: Diagnostic {
3220 severity: DiagnosticSeverity::HINT,
3221 message: "error 2 hint 1".to_string(),
3222 group_id: 0,
3223 is_primary: false,
3224 ..Default::default()
3225 }
3226 },
3227 DiagnosticEntry {
3228 range: Point::new(1, 13)..Point::new(1, 15),
3229 diagnostic: Diagnostic {
3230 severity: DiagnosticSeverity::HINT,
3231 message: "error 2 hint 2".to_string(),
3232 group_id: 0,
3233 is_primary: false,
3234 ..Default::default()
3235 }
3236 },
3237 DiagnosticEntry {
3238 range: Point::new(2, 8)..Point::new(2, 17),
3239 diagnostic: Diagnostic {
3240 severity: DiagnosticSeverity::ERROR,
3241 message: "error 2".to_string(),
3242 group_id: 0,
3243 is_primary: true,
3244 ..Default::default()
3245 }
3246 }
3247 ]
3248 );
3249
3250 assert_eq!(
3251 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3252 &[
3253 DiagnosticEntry {
3254 range: Point::new(1, 13)..Point::new(1, 15),
3255 diagnostic: Diagnostic {
3256 severity: DiagnosticSeverity::HINT,
3257 message: "error 2 hint 1".to_string(),
3258 group_id: 0,
3259 is_primary: false,
3260 ..Default::default()
3261 }
3262 },
3263 DiagnosticEntry {
3264 range: Point::new(1, 13)..Point::new(1, 15),
3265 diagnostic: Diagnostic {
3266 severity: DiagnosticSeverity::HINT,
3267 message: "error 2 hint 2".to_string(),
3268 group_id: 0,
3269 is_primary: false,
3270 ..Default::default()
3271 }
3272 },
3273 DiagnosticEntry {
3274 range: Point::new(2, 8)..Point::new(2, 17),
3275 diagnostic: Diagnostic {
3276 severity: DiagnosticSeverity::ERROR,
3277 message: "error 2".to_string(),
3278 group_id: 0,
3279 is_primary: true,
3280 ..Default::default()
3281 }
3282 }
3283 ]
3284 );
3285
3286 assert_eq!(
3287 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3288 &[
3289 DiagnosticEntry {
3290 range: Point::new(1, 8)..Point::new(1, 9),
3291 diagnostic: Diagnostic {
3292 severity: DiagnosticSeverity::WARNING,
3293 message: "error 1".to_string(),
3294 group_id: 1,
3295 is_primary: true,
3296 ..Default::default()
3297 }
3298 },
3299 DiagnosticEntry {
3300 range: Point::new(1, 8)..Point::new(1, 9),
3301 diagnostic: Diagnostic {
3302 severity: DiagnosticSeverity::HINT,
3303 message: "error 1 hint 1".to_string(),
3304 group_id: 1,
3305 is_primary: false,
3306 ..Default::default()
3307 }
3308 },
3309 ]
3310 );
3311}
3312
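// Renaming a symbol: `prepare_rename` should surface the range reported by the server,
// and `perform_rename` should produce a transaction covering every buffer touched by
// the returned workspace edit.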
3313#[gpui::test]
3314async fn test_rename(cx: &mut gpui::TestAppContext) {
3315 init_test(cx);
3316
3317 let mut language = Language::new(
3318 LanguageConfig {
3319 name: "Rust".into(),
3320 path_suffixes: vec!["rs".to_string()],
3321 ..Default::default()
3322 },
3323 Some(tree_sitter_rust::language()),
3324 );
3325 let mut fake_servers = language
3326 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3327 capabilities: lsp::ServerCapabilities {
3328 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3329 prepare_provider: Some(true),
3330 work_done_progress_options: Default::default(),
3331 })),
3332 ..Default::default()
3333 },
3334 ..Default::default()
3335 }))
3336 .await;
3337
3338 let fs = FakeFs::new(cx.background());
3339 fs.insert_tree(
3340 "/dir",
3341 json!({
3342 "one.rs": "const ONE: usize = 1;",
3343 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3344 }),
3345 )
3346 .await;
3347
3348 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3349 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3350 let buffer = project
3351 .update(cx, |project, cx| {
3352 project.open_local_buffer("/dir/one.rs", cx)
3353 })
3354 .await
3355 .unwrap();
3356
3357 let fake_server = fake_servers.next().await.unwrap();
3358
3359 let response = project.update(cx, |project, cx| {
3360 project.prepare_rename(buffer.clone(), 7, cx)
3361 });
3362 fake_server
3363 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3364 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3365 assert_eq!(params.position, lsp::Position::new(0, 7));
3366 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3367 lsp::Position::new(0, 6),
3368 lsp::Position::new(0, 9),
3369 ))))
3370 })
3371 .next()
3372 .await
3373 .unwrap();
3374 let range = response.await.unwrap().unwrap();
3375 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3376 assert_eq!(range, 6..9);
3377
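// Perform the rename. The server responds with a workspace edit that spans both files.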
3378 let response = project.update(cx, |project, cx| {
3379 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3380 });
3381 fake_server
3382 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3383 assert_eq!(
3384 params.text_document_position.text_document.uri.as_str(),
3385 "file:///dir/one.rs"
3386 );
3387 assert_eq!(
3388 params.text_document_position.position,
3389 lsp::Position::new(0, 7)
3390 );
3391 assert_eq!(params.new_name, "THREE");
3392 Ok(Some(lsp::WorkspaceEdit {
3393 changes: Some(
3394 [
3395 (
3396 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3397 vec![lsp::TextEdit::new(
3398 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3399 "THREE".to_string(),
3400 )],
3401 ),
3402 (
3403 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3404 vec![
3405 lsp::TextEdit::new(
3406 lsp::Range::new(
3407 lsp::Position::new(0, 24),
3408 lsp::Position::new(0, 27),
3409 ),
3410 "THREE".to_string(),
3411 ),
3412 lsp::TextEdit::new(
3413 lsp::Range::new(
3414 lsp::Position::new(0, 35),
3415 lsp::Position::new(0, 38),
3416 ),
3417 "THREE".to_string(),
3418 ),
3419 ],
3420 ),
3421 ]
3422 .into_iter()
3423 .collect(),
3424 ),
3425 ..Default::default()
3426 }))
3427 })
3428 .next()
3429 .await
3430 .unwrap();
3431 let mut transaction = response.await.unwrap().0;
3432 assert_eq!(transaction.len(), 2);
3433 assert_eq!(
3434 transaction
3435 .remove_entry(&buffer)
3436 .unwrap()
3437 .0
3438 .read_with(cx, |buffer, _| buffer.text()),
3439 "const THREE: usize = 1;"
3440 );
3441 assert_eq!(
3442 transaction
3443 .into_keys()
3444 .next()
3445 .unwrap()
3446 .read_with(cx, |buffer, _| buffer.text()),
3447 "const TWO: usize = one::THREE + one::THREE;"
3448 );
3449}
3450
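// Project-wide search should find matches in files on disk as well as in open buffers
// that contain unsaved edits.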
3451#[gpui::test]
3452async fn test_search(cx: &mut gpui::TestAppContext) {
3453 init_test(cx);
3454
3455 let fs = FakeFs::new(cx.background());
3456 fs.insert_tree(
3457 "/dir",
3458 json!({
3459 "one.rs": "const ONE: usize = 1;",
3460 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3461 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3462 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3463 }),
3464 )
3465 .await;
3466 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3467 assert_eq!(
3468 search(
3469 &project,
3470 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3471 cx
3472 )
3473 .await
3474 .unwrap(),
3475 HashMap::from_iter([
3476 ("two.rs".to_string(), vec![6..9]),
3477 ("three.rs".to_string(), vec![37..40])
3478 ])
3479 );
3480
3481 let buffer_4 = project
3482 .update(cx, |project, cx| {
3483 project.open_local_buffer("/dir/four.rs", cx)
3484 })
3485 .await
3486 .unwrap();
3487 buffer_4.update(cx, |buffer, cx| {
3488 let text = "two::TWO";
3489 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3490 });
3491
3492 assert_eq!(
3493 search(
3494 &project,
3495 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3496 cx
3497 )
3498 .await
3499 .unwrap(),
3500 HashMap::from_iter([
3501 ("two.rs".to_string(), vec![6..9]),
3502 ("three.rs".to_string(), vec![37..40]),
3503 ("four.rs".to_string(), vec![25..28, 36..39])
3504 ])
3505 );
3506}
3507
3508#[gpui::test]
3509async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3510 init_test(cx);
3511
3512 let search_query = "file";
3513
3514 let fs = FakeFs::new(cx.background());
3515 fs.insert_tree(
3516 "/dir",
3517 json!({
3518 "one.rs": r#"// Rust file one"#,
3519 "one.ts": r#"// TypeScript file one"#,
3520 "two.rs": r#"// Rust file two"#,
3521 "two.ts": r#"// TypeScript file two"#,
3522 }),
3523 )
3524 .await;
3525 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3526
3527 assert!(
3528 search(
3529 &project,
3530 SearchQuery::text(
3531 search_query,
3532 false,
3533 true,
3534 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3535 Vec::new()
3536 ),
3537 cx
3538 )
3539 .await
3540 .unwrap()
3541 .is_empty(),
3542 "If no inclusions match, no files should be returned"
3543 );
3544
3545 assert_eq!(
3546 search(
3547 &project,
3548 SearchQuery::text(
3549 search_query,
3550 false,
3551 true,
3552 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3553 Vec::new()
3554 ),
3555 cx
3556 )
3557 .await
3558 .unwrap(),
3559 HashMap::from_iter([
3560 ("one.rs".to_string(), vec![8..12]),
3561 ("two.rs".to_string(), vec![8..12]),
3562 ]),
3563 "Rust only search should give only Rust files"
3564 );
3565
3566 assert_eq!(
3567 search(
3568 &project,
3569 SearchQuery::text(
3570 search_query,
3571 false,
3572 true,
3573 vec![
3574 Glob::new("*.ts").unwrap().compile_matcher(),
3575 Glob::new("*.odd").unwrap().compile_matcher(),
3576 ],
3577 Vec::new()
3578 ),
3579 cx
3580 )
3581 .await
3582 .unwrap(),
3583 HashMap::from_iter([
3584 ("one.ts".to_string(), vec![14..18]),
3585 ("two.ts".to_string(), vec![14..18]),
3586 ]),
3587 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3588 );
3589
3590 assert_eq!(
3591 search(
3592 &project,
3593 SearchQuery::text(
3594 search_query,
3595 false,
3596 true,
3597 vec![
3598 Glob::new("*.rs").unwrap().compile_matcher(),
3599 Glob::new("*.ts").unwrap().compile_matcher(),
3600 Glob::new("*.odd").unwrap().compile_matcher(),
3601 ],
3602 Vec::new()
3603 ),
3604 cx
3605 )
3606 .await
3607 .unwrap(),
3608 HashMap::from_iter([
3609 ("one.rs".to_string(), vec![8..12]),
3610 ("one.ts".to_string(), vec![14..18]),
3611 ("two.rs".to_string(), vec![8..12]),
3612 ("two.ts".to_string(), vec![14..18]),
3613 ]),
3614 "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3615 );
3616}
3617
3618#[gpui::test]
3619async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3620 init_test(cx);
3621
3622 let search_query = "file";
3623
3624 let fs = FakeFs::new(cx.background());
3625 fs.insert_tree(
3626 "/dir",
3627 json!({
3628 "one.rs": r#"// Rust file one"#,
3629 "one.ts": r#"// TypeScript file one"#,
3630 "two.rs": r#"// Rust file two"#,
3631 "two.ts": r#"// TypeScript file two"#,
3632 }),
3633 )
3634 .await;
3635 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3636
3637 assert_eq!(
3638 search(
3639 &project,
3640 SearchQuery::text(
3641 search_query,
3642 false,
3643 true,
3644 Vec::new(),
3645 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3646 ),
3647 cx
3648 )
3649 .await
3650 .unwrap(),
3651 HashMap::from_iter([
3652 ("one.rs".to_string(), vec![8..12]),
3653 ("one.ts".to_string(), vec![14..18]),
3654 ("two.rs".to_string(), vec![8..12]),
3655 ("two.ts".to_string(), vec![14..18]),
3656 ]),
3657 "If no exclusions match, all files should be returned"
3658 );
3659
3660 assert_eq!(
3661 search(
3662 &project,
3663 SearchQuery::text(
3664 search_query,
3665 false,
3666 true,
3667 Vec::new(),
3668 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3669 ),
3670 cx
3671 )
3672 .await
3673 .unwrap(),
3674 HashMap::from_iter([
3675 ("one.ts".to_string(), vec![14..18]),
3676 ("two.ts".to_string(), vec![14..18]),
3677 ]),
3678 "Rust exclusion search should give only TypeScript files"
3679 );
3680
3681 assert_eq!(
3682 search(
3683 &project,
3684 SearchQuery::text(
3685 search_query,
3686 false,
3687 true,
3688 Vec::new(),
3689 vec![
3690 Glob::new("*.ts").unwrap().compile_matcher(),
3691 Glob::new("*.odd").unwrap().compile_matcher(),
3692 ],
3693 ),
3694 cx
3695 )
3696 .await
3697 .unwrap(),
3698 HashMap::from_iter([
3699 ("one.rs".to_string(), vec![8..12]),
3700 ("two.rs".to_string(), vec![8..12]),
3701 ]),
3702 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3703 );
3704
3705 assert!(
3706 search(
3707 &project,
3708 SearchQuery::text(
3709 search_query,
3710 false,
3711 true,
3712 Vec::new(),
3713 vec![
3714 Glob::new("*.rs").unwrap().compile_matcher(),
3715 Glob::new("*.ts").unwrap().compile_matcher(),
3716 Glob::new("*.odd").unwrap().compile_matcher(),
3717 ],
3718 ),
3719 cx
3720 )
3721 .await
3722 .unwrap().is_empty(),
3723 "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
3724 );
3725}
3726
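// When a file matches both an inclusion and an exclusion pattern, the exclusion takes precedence.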
3727#[gpui::test]
3728async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3729 init_test(cx);
3730
3731 let search_query = "file";
3732
3733 let fs = FakeFs::new(cx.background());
3734 fs.insert_tree(
3735 "/dir",
3736 json!({
3737 "one.rs": r#"// Rust file one"#,
3738 "one.ts": r#"// TypeScript file one"#,
3739 "two.rs": r#"// Rust file two"#,
3740 "two.ts": r#"// TypeScript file two"#,
3741 }),
3742 )
3743 .await;
3744 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3745
3746 assert!(
3747 search(
3748 &project,
3749 SearchQuery::text(
3750 search_query,
3751 false,
3752 true,
3753 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3754 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3755 ),
3756 cx
3757 )
3758 .await
3759 .unwrap()
3760 .is_empty(),
3761 "If neither the inclusions nor the exclusions match any files, nothing should be returned"
3762 );
3763
3764 assert!(
3765 search(
3766 &project,
3767 SearchQuery::text(
3768 search_query,
3769 false,
3770 true,
3771 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3772 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3773 ),
3774 cx
3775 )
3776 .await
3777 .unwrap()
3778 .is_empty(),
3779 "If both the TypeScript inclusions and exclusions match, the exclusions should win and no files should be returned."
3780 );
3781
3782 assert!(
3783 search(
3784 &project,
3785 SearchQuery::text(
3786 search_query,
3787 false,
3788 true,
3789 vec![
3790 Glob::new("*.ts").unwrap().compile_matcher(),
3791 Glob::new("*.odd").unwrap().compile_matcher()
3792 ],
3793 vec![
3794 Glob::new("*.ts").unwrap().compile_matcher(),
3795 Glob::new("*.odd").unwrap().compile_matcher()
3796 ],
3797 ),
3798 cx
3799 )
3800 .await
3801 .unwrap()
3802 .is_empty(),
3803 "Non-matching inclusions and exclusions should not change that."
3804 );
3805
3806 assert_eq!(
3807 search(
3808 &project,
3809 SearchQuery::text(
3810 search_query,
3811 false,
3812 true,
3813 vec![
3814 Glob::new("*.ts").unwrap().compile_matcher(),
3815 Glob::new("*.odd").unwrap().compile_matcher()
3816 ],
3817 vec![
3818 Glob::new("*.rs").unwrap().compile_matcher(),
3819 Glob::new("*.odd").unwrap().compile_matcher()
3820 ],
3821 ),
3822 cx
3823 )
3824 .await
3825 .unwrap(),
3826 HashMap::from_iter([
3827 ("one.ts".to_string(), vec![14..18]),
3828 ("two.ts".to_string(), vec![14..18]),
3829 ]),
3830 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3831 );
3832}
3833
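// Runs `query` against the project and flattens the results into a map from file path
// to the matching offset ranges within that file.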
3834async fn search(
3835 project: &ModelHandle<Project>,
3836 query: SearchQuery,
3837 cx: &mut gpui::TestAppContext,
3838) -> Result<HashMap<String, Vec<Range<usize>>>> {
3839 let results = project
3840 .update(cx, |project, cx| project.search(query, cx))
3841 .await?;
3842
3843 Ok(results
3844 .into_iter()
3845 .map(|(buffer, ranges)| {
3846 buffer.read_with(cx, |buffer, _| {
3847 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3848 let ranges = ranges
3849 .into_iter()
3850 .map(|range| range.to_offset(buffer))
3851 .collect::<Vec<_>>();
3852 (path, ranges)
3853 })
3854 })
3855 .collect())
3856}
3857
3858fn init_test(cx: &mut gpui::TestAppContext) {
3859 cx.foreground().forbid_parking();
3860
3861 cx.update(|cx| {
3862 cx.set_global(SettingsStore::test(cx));
3863 language::init(cx);
3864 Project::init_settings(cx);
3865 });
3866}