use crate::{worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

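// Initialize logging for the whole test binary when `RUST_LOG` is set. The `#[ctor]`
// attribute runs `init_logger` once at startup, before any test executes.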
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.foreground().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}

#[gpui::test]
async fn test_managing_project_specific_settings(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());

    deterministic.run_until_parked();
    cx.read(|cx| {
        let tree = worktree.read(cx);

        let settings_a = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("a/a.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );
        let settings_b = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("b/b.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });
}

#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A Rust language server is started up, and it is notified about the open Rust file.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A JSON language server is started up, and it is notified only about the JSON buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

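    // Attach a placeholder diagnostic to the renamed buffer so we can verify below
    // that diagnostics are cleared when the buffer's language changes.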
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // The buffer's diagnostics are cleared, since its language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart the language servers.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure the Rust document is reopened in the new Rust language server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure the JSON documents are reopened in the new JSON language server.
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "a.rs": "",
            "b.rs": "",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.foreground().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/a.rs", cx)
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
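    // Simulate the server dynamically registering a `workspace/didChangeWatchedFiles`
    // watcher for Rust and C files under the project root, and record the file events
    // it receives.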
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![lsp::FileSystemWatcher {
                            glob_pattern: "/the-root/*.{rs,c}".to_string(),
                            kind: None,
                        }],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.foreground().run_until_parked();
    assert_eq!(file_changes.lock().len(), 0);

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.foreground().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}

#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

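    // Diagnostics in the hidden (non-visible) worktree should not contribute to the
    // project-wide diagnostic summaries.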
    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause the worktree to start the fake language server.
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(0); 0]
        );
    });
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish some diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared when the server restarts.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}

#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.foreground().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}

#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable the Rust language server, ensuring that only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

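    // Convert the LSP edits, which were computed against the older document version,
    // into buffer edits; they should account for the changes made since that version.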
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

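/// Collects the buffer's chunks over `range` into `(text, severity)` pairs, merging
/// adjacent chunks that carry the same diagnostic severity.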
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}

2057#[gpui::test(iterations = 10)]
2058async fn test_definition(cx: &mut gpui::TestAppContext) {
2059 init_test(cx);
2060
2061 let mut language = Language::new(
2062 LanguageConfig {
2063 name: "Rust".into(),
2064 path_suffixes: vec!["rs".to_string()],
2065 ..Default::default()
2066 },
2067 Some(tree_sitter_rust::language()),
2068 );
2069 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2070
2071 let fs = FakeFs::new(cx.background());
2072 fs.insert_tree(
2073 "/dir",
2074 json!({
2075 "a.rs": "const fn a() { A }",
2076 "b.rs": "const y: i32 = crate::a()",
2077 }),
2078 )
2079 .await;
2080
2081 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2082 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2083
2084 let buffer = project
2085 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2086 .await
2087 .unwrap();
2088
2089 let fake_server = fake_servers.next().await.unwrap();
2090 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2091 let params = params.text_document_position_params;
2092 assert_eq!(
2093 params.text_document.uri.to_file_path().unwrap(),
2094 Path::new("/dir/b.rs"),
2095 );
2096 assert_eq!(params.position, lsp::Position::new(0, 22));
2097
2098 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2099 lsp::Location::new(
2100 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2101 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2102 ),
2103 )))
2104 });
2105
2106 let mut definitions = project
2107 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2108 .await
2109 .unwrap();
2110
    // Assert that no new language server was started.
2112 cx.foreground().run_until_parked();
2113 assert!(fake_servers.try_next().is_err());
2114
2115 assert_eq!(definitions.len(), 1);
2116 let definition = definitions.pop().unwrap();
2117 cx.update(|cx| {
2118 let target_buffer = definition.target.buffer.read(cx);
2119 assert_eq!(
2120 target_buffer
2121 .file()
2122 .unwrap()
2123 .as_local()
2124 .unwrap()
2125 .abs_path(cx),
2126 Path::new("/dir/a.rs"),
2127 );
2128 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2129 assert_eq!(
2130 list_worktrees(&project, cx),
2131 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2132 );
2133
2134 drop(definition);
2135 });
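    // Dropping the definition releases the target buffer, so the invisible
    // worktree that was opened for it is removed from the project.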
2136 cx.read(|cx| {
2137 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2138 });
2139
2140 fn list_worktrees<'a>(
2141 project: &'a ModelHandle<Project>,
2142 cx: &'a AppContext,
2143 ) -> Vec<(&'a Path, bool)> {
2144 project
2145 .read(cx)
2146 .worktrees(cx)
2147 .map(|worktree| {
2148 let worktree = worktree.read(cx);
2149 (
2150 worktree.as_local().unwrap().abs_path().as_ref(),
2151 worktree.is_visible(),
2152 )
2153 })
2154 .collect::<Vec<_>>()
2155 }
2156}
2157
2158#[gpui::test]
2159async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2160 init_test(cx);
2161
2162 let mut language = Language::new(
2163 LanguageConfig {
2164 name: "TypeScript".into(),
2165 path_suffixes: vec!["ts".to_string()],
2166 ..Default::default()
2167 },
2168 Some(tree_sitter_typescript::language_typescript()),
2169 );
2170 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2171
2172 let fs = FakeFs::new(cx.background());
2173 fs.insert_tree(
2174 "/dir",
2175 json!({
2176 "a.ts": "",
2177 }),
2178 )
2179 .await;
2180
2181 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2182 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2183 let buffer = project
2184 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2185 .await
2186 .unwrap();
2187
2188 let fake_server = fake_language_servers.next().await.unwrap();
2189
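    // The server provides an `insert_text` but no edit range, so the word
    // adjacent to the cursor ("fqn") becomes the range that gets replaced.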
2190 let text = "let a = b.fqn";
2191 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2192 let completions = project.update(cx, |project, cx| {
2193 project.completions(&buffer, text.len(), cx)
2194 });
2195
2196 fake_server
2197 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2198 Ok(Some(lsp::CompletionResponse::Array(vec![
2199 lsp::CompletionItem {
2200 label: "fullyQualifiedName?".into(),
2201 insert_text: Some("fullyQualifiedName".into()),
2202 ..Default::default()
2203 },
2204 ])))
2205 })
2206 .next()
2207 .await;
2208 let completions = completions.await.unwrap();
2209 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2210 assert_eq!(completions.len(), 1);
2211 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2212 assert_eq!(
2213 completions[0].old_range.to_offset(&snapshot),
2214 text.len() - 3..text.len()
2215 );
2216
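    // The same fallback applies inside a string literal: the characters before
    // the cursor ("cmp") form the range that the completion replaces.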
2217 let text = "let a = \"atoms/cmp\"";
2218 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2219 let completions = project.update(cx, |project, cx| {
2220 project.completions(&buffer, text.len() - 1, cx)
2221 });
2222
2223 fake_server
2224 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2225 Ok(Some(lsp::CompletionResponse::Array(vec![
2226 lsp::CompletionItem {
2227 label: "component".into(),
2228 ..Default::default()
2229 },
2230 ])))
2231 })
2232 .next()
2233 .await;
2234 let completions = completions.await.unwrap();
2235 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2236 assert_eq!(completions.len(), 1);
2237 assert_eq!(completions[0].new_text, "component");
2238 assert_eq!(
2239 completions[0].old_range.to_offset(&snapshot),
2240 text.len() - 4..text.len() - 1
2241 );
2242}
2243
2244#[gpui::test]
2245async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2246 init_test(cx);
2247
2248 let mut language = Language::new(
2249 LanguageConfig {
2250 name: "TypeScript".into(),
2251 path_suffixes: vec!["ts".to_string()],
2252 ..Default::default()
2253 },
2254 Some(tree_sitter_typescript::language_typescript()),
2255 );
2256 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2257
2258 let fs = FakeFs::new(cx.background());
2259 fs.insert_tree(
2260 "/dir",
2261 json!({
2262 "a.ts": "",
2263 }),
2264 )
2265 .await;
2266
2267 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2268 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2269 let buffer = project
2270 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2271 .await
2272 .unwrap();
2273
2274 let fake_server = fake_language_servers.next().await.unwrap();
2275
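    // The server's insert text contains carriage returns; these should be
    // normalized to plain newlines in the resulting completion.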
2276 let text = "let a = b.fqn";
2277 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2278 let completions = project.update(cx, |project, cx| {
2279 project.completions(&buffer, text.len(), cx)
2280 });
2281
2282 fake_server
2283 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2284 Ok(Some(lsp::CompletionResponse::Array(vec![
2285 lsp::CompletionItem {
2286 label: "fullyQualifiedName?".into(),
2287 insert_text: Some("fully\rQualified\r\nName".into()),
2288 ..Default::default()
2289 },
2290 ])))
2291 })
2292 .next()
2293 .await;
2294 let completions = completions.await.unwrap();
2295 assert_eq!(completions.len(), 1);
2296 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2297}
2298
2299#[gpui::test(iterations = 10)]
2300async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2301 init_test(cx);
2302
2303 let mut language = Language::new(
2304 LanguageConfig {
2305 name: "TypeScript".into(),
2306 path_suffixes: vec!["ts".to_string()],
2307 ..Default::default()
2308 },
2309 None,
2310 );
2311 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2312
2313 let fs = FakeFs::new(cx.background());
2314 fs.insert_tree(
2315 "/dir",
2316 json!({
2317 "a.ts": "a",
2318 }),
2319 )
2320 .await;
2321
2322 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2323 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2324 let buffer = project
2325 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2326 .await
2327 .unwrap();
2328
2329 let fake_server = fake_language_servers.next().await.unwrap();
2330
    // The language server returns code actions that contain commands but no edits.
2332 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2333 fake_server
2334 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2335 Ok(Some(vec![
2336 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2337 title: "The code action".into(),
2338 command: Some(lsp::Command {
2339 title: "The command".into(),
2340 command: "_the/command".into(),
2341 arguments: Some(vec![json!("the-argument")]),
2342 }),
2343 ..Default::default()
2344 }),
2345 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2346 title: "two".into(),
2347 ..Default::default()
2348 }),
2349 ]))
2350 })
2351 .next()
2352 .await;
2353
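    // Apply the first code action, the one that carries a command.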
2354 let action = actions.await.unwrap()[0].clone();
2355 let apply = project.update(cx, |project, cx| {
2356 project.apply_code_action(buffer.clone(), action, true, cx)
2357 });
2358
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the action's command instead.
2361 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2362 |action, _| async move { Ok(action) },
2363 );
2364
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2367 fake_server
2368 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2369 let fake = fake_server.clone();
2370 move |params, _| {
2371 assert_eq!(params.command, "_the/command");
2372 let fake = fake.clone();
2373 async move {
2374 fake.server
2375 .request::<lsp::request::ApplyWorkspaceEdit>(
2376 lsp::ApplyWorkspaceEditParams {
2377 label: None,
2378 edit: lsp::WorkspaceEdit {
2379 changes: Some(
2380 [(
2381 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2382 vec![lsp::TextEdit {
2383 range: lsp::Range::new(
2384 lsp::Position::new(0, 0),
2385 lsp::Position::new(0, 0),
2386 ),
2387 new_text: "X".into(),
2388 }],
2389 )]
2390 .into_iter()
2391 .collect(),
2392 ),
2393 ..Default::default()
2394 },
2395 },
2396 )
2397 .await
2398 .unwrap();
2399 Ok(Some(json!(null)))
2400 }
2401 }
2402 })
2403 .next()
2404 .await;
2405
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2408 let transaction = apply.await.unwrap();
2409 assert!(transaction.0.contains_key(&buffer));
2410 buffer.update(cx, |buffer, cx| {
2411 assert_eq!(buffer.text(), "Xa");
2412 buffer.undo(cx);
2413 assert_eq!(buffer.text(), "a");
2414 });
2415}
2416
2417#[gpui::test(iterations = 10)]
2418async fn test_save_file(cx: &mut gpui::TestAppContext) {
2419 init_test(cx);
2420
2421 let fs = FakeFs::new(cx.background());
2422 fs.insert_tree(
2423 "/dir",
2424 json!({
2425 "file1": "the old contents",
2426 }),
2427 )
2428 .await;
2429
2430 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2431 let buffer = project
2432 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2433 .await
2434 .unwrap();
2435 buffer.update(cx, |buffer, cx| {
2436 assert_eq!(buffer.text(), "the old contents");
2437 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2438 });
2439
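    // Saving should write the edited contents back to the file on disk.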
2440 project
2441 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2442 .await
2443 .unwrap();
2444
2445 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2446 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2447}
2448
2449#[gpui::test]
2450async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2451 init_test(cx);
2452
2453 let fs = FakeFs::new(cx.background());
2454 fs.insert_tree(
2455 "/dir",
2456 json!({
2457 "file1": "the old contents",
2458 }),
2459 )
2460 .await;
2461
2462 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2463 let buffer = project
2464 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2465 .await
2466 .unwrap();
2467 buffer.update(cx, |buffer, cx| {
2468 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2469 });
2470
2471 project
2472 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2473 .await
2474 .unwrap();
2475
2476 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2477 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2478}
2479
2480#[gpui::test]
2481async fn test_save_as(cx: &mut gpui::TestAppContext) {
2482 init_test(cx);
2483
2484 let fs = FakeFs::new(cx.background());
2485 fs.insert_tree("/dir", json!({})).await;
2486
2487 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2488
2489 let languages = project.read_with(cx, |project, _| project.languages().clone());
2490 languages.register(
2491 "/some/path",
2492 LanguageConfig {
2493 name: "Rust".into(),
2494 path_suffixes: vec!["rs".into()],
2495 ..Default::default()
2496 },
2497 tree_sitter_rust::language(),
2498 vec![],
2499 |_| Default::default(),
2500 );
2501
2502 let buffer = project.update(cx, |project, cx| {
2503 project.create_buffer("", None, cx).unwrap()
2504 });
2505 buffer.update(cx, |buffer, cx| {
2506 buffer.edit([(0..0, "abc")], None, cx);
2507 assert!(buffer.is_dirty());
2508 assert!(!buffer.has_conflict());
2509 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2510 });
2511 project
2512 .update(cx, |project, cx| {
2513 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2514 })
2515 .await
2516 .unwrap();
2517 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2518
2519 cx.foreground().run_until_parked();
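    // After saving, the buffer reflects its new path and is assigned the Rust
    // language based on the `.rs` extension.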
2520 buffer.read_with(cx, |buffer, cx| {
2521 assert_eq!(
2522 buffer.file().unwrap().full_path(cx),
2523 Path::new("dir/file1.rs")
2524 );
2525 assert!(!buffer.is_dirty());
2526 assert!(!buffer.has_conflict());
2527 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2528 });
2529
2530 let opened_buffer = project
2531 .update(cx, |project, cx| {
2532 project.open_local_buffer("/dir/file1.rs", cx)
2533 })
2534 .await
2535 .unwrap();
2536 assert_eq!(opened_buffer, buffer);
2537}
2538
2539#[gpui::test(retries = 5)]
2540async fn test_rescan_and_remote_updates(
2541 deterministic: Arc<Deterministic>,
2542 cx: &mut gpui::TestAppContext,
2543) {
2544 init_test(cx);
2545 cx.foreground().allow_parking();
2546
2547 let dir = temp_tree(json!({
2548 "a": {
2549 "file1": "",
2550 "file2": "",
2551 "file3": "",
2552 },
2553 "b": {
2554 "c": {
2555 "file4": "",
2556 "file5": "",
2557 }
2558 }
2559 }));
2560
2561 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2562 let rpc = project.read_with(cx, |p, _| p.client.clone());
2563
2564 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2565 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2566 async move { buffer.await.unwrap() }
2567 };
2568 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2569 project.read_with(cx, |project, cx| {
2570 let tree = project.worktrees(cx).next().unwrap();
2571 tree.read(cx)
2572 .entry_for_path(path)
2573 .unwrap_or_else(|| panic!("no entry for path {}", path))
2574 .id
2575 })
2576 };
2577
2578 let buffer2 = buffer_for_path("a/file2", cx).await;
2579 let buffer3 = buffer_for_path("a/file3", cx).await;
2580 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2581 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2582
2583 let file2_id = id_for_path("a/file2", cx);
2584 let file3_id = id_for_path("a/file3", cx);
2585 let file4_id = id_for_path("b/c/file4", cx);
2586
2587 // Create a remote copy of this worktree.
2588 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2589
2590 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2591
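    // Record the update messages that the local worktree produces so that they
    // can be replayed on the remote worktree below.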
2592 let updates = Arc::new(Mutex::new(Vec::new()));
2593 tree.update(cx, |tree, cx| {
2594 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2595 let updates = updates.clone();
2596 move |update| {
2597 updates.lock().push(update);
2598 async { true }
2599 }
2600 });
2601 });
2602
2603 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2604 deterministic.run_until_parked();
2605
2606 cx.read(|cx| {
2607 assert!(!buffer2.read(cx).is_dirty());
2608 assert!(!buffer3.read(cx).is_dirty());
2609 assert!(!buffer4.read(cx).is_dirty());
2610 assert!(!buffer5.read(cx).is_dirty());
2611 });
2612
2613 // Rename and delete files and directories.
2614 tree.flush_fs_events(cx).await;
2615 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2616 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2617 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2618 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2619 tree.flush_fs_events(cx).await;
2620
2621 let expected_paths = vec![
2622 "a",
2623 "a/file1",
2624 "a/file2.new",
2625 "b",
2626 "d",
2627 "d/file3",
2628 "d/file4",
2629 ];
2630
2631 cx.read(|app| {
2632 assert_eq!(
2633 tree.read(app)
2634 .paths()
2635 .map(|p| p.to_str().unwrap())
2636 .collect::<Vec<_>>(),
2637 expected_paths
2638 );
2639
2640 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2641 assert_eq!(id_for_path("d/file3", cx), file3_id);
2642 assert_eq!(id_for_path("d/file4", cx), file4_id);
2643
2644 assert_eq!(
2645 buffer2.read(app).file().unwrap().path().as_ref(),
2646 Path::new("a/file2.new")
2647 );
2648 assert_eq!(
2649 buffer3.read(app).file().unwrap().path().as_ref(),
2650 Path::new("d/file3")
2651 );
2652 assert_eq!(
2653 buffer4.read(app).file().unwrap().path().as_ref(),
2654 Path::new("d/file4")
2655 );
2656 assert_eq!(
2657 buffer5.read(app).file().unwrap().path().as_ref(),
2658 Path::new("b/c/file5")
2659 );
2660
2661 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2662 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2663 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2664 assert!(buffer5.read(app).file().unwrap().is_deleted());
2665 });
2666
2667 // Update the remote worktree. Check that it becomes consistent with the
2668 // local worktree.
2669 deterministic.run_until_parked();
2670 remote.update(cx, |remote, _| {
2671 for update in updates.lock().drain(..) {
2672 remote.as_remote_mut().unwrap().update_from_remote(update);
2673 }
2674 });
2675 deterministic.run_until_parked();
2676 remote.read_with(cx, |remote, _| {
2677 assert_eq!(
2678 remote
2679 .paths()
2680 .map(|p| p.to_str().unwrap())
2681 .collect::<Vec<_>>(),
2682 expected_paths
2683 );
2684 });
2685}
2686
2687#[gpui::test(iterations = 10)]
2688async fn test_buffer_identity_across_renames(
2689 deterministic: Arc<Deterministic>,
2690 cx: &mut gpui::TestAppContext,
2691) {
2692 init_test(cx);
2693
2694 let fs = FakeFs::new(cx.background());
2695 fs.insert_tree(
2696 "/dir",
2697 json!({
2698 "a": {
2699 "file1": "",
2700 }
2701 }),
2702 )
2703 .await;
2704
2705 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2706 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2707 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2708
2709 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2710 project.read_with(cx, |project, cx| {
2711 let tree = project.worktrees(cx).next().unwrap();
2712 tree.read(cx)
2713 .entry_for_path(path)
2714 .unwrap_or_else(|| panic!("no entry for path {}", path))
2715 .id
2716 })
2717 };
2718
2719 let dir_id = id_for_path("a", cx);
2720 let file_id = id_for_path("a/file1", cx);
2721 let buffer = project
2722 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2723 .await
2724 .unwrap();
2725 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2726
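    // Rename the directory containing the open buffer. Entry ids and the
    // buffer's identity should be preserved across the rename.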
2727 project
2728 .update(cx, |project, cx| {
2729 project.rename_entry(dir_id, Path::new("b"), cx)
2730 })
2731 .unwrap()
2732 .await
2733 .unwrap();
2734 deterministic.run_until_parked();
2735 assert_eq!(id_for_path("b", cx), dir_id);
2736 assert_eq!(id_for_path("b/file1", cx), file_id);
2737 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2738}
2739
2740#[gpui::test]
2741async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2742 init_test(cx);
2743
2744 let fs = FakeFs::new(cx.background());
2745 fs.insert_tree(
2746 "/dir",
2747 json!({
2748 "a.txt": "a-contents",
2749 "b.txt": "b-contents",
2750 }),
2751 )
2752 .await;
2753
2754 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2755
2756 // Spawn multiple tasks to open paths, repeating some paths.
2757 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2758 (
2759 p.open_local_buffer("/dir/a.txt", cx),
2760 p.open_local_buffer("/dir/b.txt", cx),
2761 p.open_local_buffer("/dir/a.txt", cx),
2762 )
2763 });
2764
2765 let buffer_a_1 = buffer_a_1.await.unwrap();
2766 let buffer_a_2 = buffer_a_2.await.unwrap();
2767 let buffer_b = buffer_b.await.unwrap();
2768 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2769 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2770
2771 // There is only one buffer per path.
2772 let buffer_a_id = buffer_a_1.id();
2773 assert_eq!(buffer_a_2.id(), buffer_a_id);
2774
2775 // Open the same path again while it is still open.
2776 drop(buffer_a_1);
2777 let buffer_a_3 = project
2778 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2779 .await
2780 .unwrap();
2781
2782 // There's still only one buffer per path.
2783 assert_eq!(buffer_a_3.id(), buffer_a_id);
2784}
2785
2786#[gpui::test]
2787async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2788 init_test(cx);
2789
2790 let fs = FakeFs::new(cx.background());
2791 fs.insert_tree(
2792 "/dir",
2793 json!({
2794 "file1": "abc",
2795 "file2": "def",
2796 "file3": "ghi",
2797 }),
2798 )
2799 .await;
2800
2801 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2802
2803 let buffer1 = project
2804 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2805 .await
2806 .unwrap();
2807 let events = Rc::new(RefCell::new(Vec::new()));
2808
2809 // initially, the buffer isn't dirty.
2810 buffer1.update(cx, |buffer, cx| {
2811 cx.subscribe(&buffer1, {
2812 let events = events.clone();
2813 move |_, _, event, _| match event {
2814 BufferEvent::Operation(_) => {}
2815 _ => events.borrow_mut().push(event.clone()),
2816 }
2817 })
2818 .detach();
2819
2820 assert!(!buffer.is_dirty());
2821 assert!(events.borrow().is_empty());
2822
2823 buffer.edit([(1..2, "")], None, cx);
2824 });
2825
2826 // after the first edit, the buffer is dirty, and emits a dirtied event.
2827 buffer1.update(cx, |buffer, cx| {
2828 assert!(buffer.text() == "ac");
2829 assert!(buffer.is_dirty());
2830 assert_eq!(
2831 *events.borrow(),
2832 &[language::Event::Edited, language::Event::DirtyChanged]
2833 );
2834 events.borrow_mut().clear();
2835 buffer.did_save(
2836 buffer.version(),
2837 buffer.as_rope().fingerprint(),
2838 buffer.file().unwrap().mtime(),
2839 cx,
2840 );
2841 });
2842
2843 // after saving, the buffer is not dirty, and emits a saved event.
2844 buffer1.update(cx, |buffer, cx| {
2845 assert!(!buffer.is_dirty());
2846 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2847 events.borrow_mut().clear();
2848
2849 buffer.edit([(1..1, "B")], None, cx);
2850 buffer.edit([(2..2, "D")], None, cx);
2851 });
2852
2853 // after editing again, the buffer is dirty, and emits another dirty event.
2854 buffer1.update(cx, |buffer, cx| {
2855 assert!(buffer.text() == "aBDc");
2856 assert!(buffer.is_dirty());
2857 assert_eq!(
2858 *events.borrow(),
2859 &[
2860 language::Event::Edited,
2861 language::Event::DirtyChanged,
2862 language::Event::Edited,
2863 ],
2864 );
2865 events.borrow_mut().clear();
2866
2867 // After restoring the buffer to its previously-saved state,
2868 // the buffer is not considered dirty anymore.
2869 buffer.edit([(1..3, "")], None, cx);
2870 assert!(buffer.text() == "ac");
2871 assert!(!buffer.is_dirty());
2872 });
2873
2874 assert_eq!(
2875 *events.borrow(),
2876 &[language::Event::Edited, language::Event::DirtyChanged]
2877 );
2878
2879 // When a file is deleted, the buffer is considered dirty.
2880 let events = Rc::new(RefCell::new(Vec::new()));
2881 let buffer2 = project
2882 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2883 .await
2884 .unwrap();
2885 buffer2.update(cx, |_, cx| {
2886 cx.subscribe(&buffer2, {
2887 let events = events.clone();
2888 move |_, _, event, _| events.borrow_mut().push(event.clone())
2889 })
2890 .detach();
2891 });
2892
2893 fs.remove_file("/dir/file2".as_ref(), Default::default())
2894 .await
2895 .unwrap();
2896 cx.foreground().run_until_parked();
2897 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2898 assert_eq!(
2899 *events.borrow(),
2900 &[
2901 language::Event::DirtyChanged,
2902 language::Event::FileHandleChanged
2903 ]
2904 );
2905
    // When a file that is already dirty is deleted, no DirtyChanged event is emitted.
2907 let events = Rc::new(RefCell::new(Vec::new()));
2908 let buffer3 = project
2909 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2910 .await
2911 .unwrap();
2912 buffer3.update(cx, |_, cx| {
2913 cx.subscribe(&buffer3, {
2914 let events = events.clone();
2915 move |_, _, event, _| events.borrow_mut().push(event.clone())
2916 })
2917 .detach();
2918 });
2919
2920 buffer3.update(cx, |buffer, cx| {
2921 buffer.edit([(0..0, "x")], None, cx);
2922 });
2923 events.borrow_mut().clear();
2924 fs.remove_file("/dir/file3".as_ref(), Default::default())
2925 .await
2926 .unwrap();
2927 cx.foreground().run_until_parked();
2928 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2929 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2930}
2931
2932#[gpui::test]
2933async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2934 init_test(cx);
2935
2936 let initial_contents = "aaa\nbbbbb\nc\n";
2937 let fs = FakeFs::new(cx.background());
2938 fs.insert_tree(
2939 "/dir",
2940 json!({
2941 "the-file": initial_contents,
2942 }),
2943 )
2944 .await;
2945 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2946 let buffer = project
2947 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2948 .await
2949 .unwrap();
2950
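    // Create anchors at column 1 of the first three rows so we can verify that
    // they are preserved across the reload from disk.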
2951 let anchors = (0..3)
2952 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2953 .collect::<Vec<_>>();
2954
2955 // Change the file on disk, adding two new lines of text, and removing
2956 // one line.
2957 buffer.read_with(cx, |buffer, _| {
2958 assert!(!buffer.is_dirty());
2959 assert!(!buffer.has_conflict());
2960 });
2961 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2962 fs.save(
2963 "/dir/the-file".as_ref(),
2964 &new_contents.into(),
2965 LineEnding::Unix,
2966 )
2967 .await
2968 .unwrap();
2969
2970 // Because the buffer was not modified, it is reloaded from disk. Its
2971 // contents are edited according to the diff between the old and new
2972 // file contents.
2973 cx.foreground().run_until_parked();
2974 buffer.update(cx, |buffer, _| {
2975 assert_eq!(buffer.text(), new_contents);
2976 assert!(!buffer.is_dirty());
2977 assert!(!buffer.has_conflict());
2978
2979 let anchor_positions = anchors
2980 .iter()
2981 .map(|anchor| anchor.to_point(&*buffer))
2982 .collect::<Vec<_>>();
2983 assert_eq!(
2984 anchor_positions,
2985 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2986 );
2987 });
2988
2989 // Modify the buffer
2990 buffer.update(cx, |buffer, cx| {
2991 buffer.edit([(0..0, " ")], None, cx);
2992 assert!(buffer.is_dirty());
2993 assert!(!buffer.has_conflict());
2994 });
2995
2996 // Change the file on disk again, adding blank lines to the beginning.
2997 fs.save(
2998 "/dir/the-file".as_ref(),
2999 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3000 LineEnding::Unix,
3001 )
3002 .await
3003 .unwrap();
3004
3005 // Because the buffer is modified, it doesn't reload from disk, but is
3006 // marked as having a conflict.
3007 cx.foreground().run_until_parked();
3008 buffer.read_with(cx, |buffer, _| {
3009 assert!(buffer.has_conflict());
3010 });
3011}
3012
3013#[gpui::test]
3014async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3015 init_test(cx);
3016
3017 let fs = FakeFs::new(cx.background());
3018 fs.insert_tree(
3019 "/dir",
3020 json!({
3021 "file1": "a\nb\nc\n",
3022 "file2": "one\r\ntwo\r\nthree\r\n",
3023 }),
3024 )
3025 .await;
3026
3027 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3028 let buffer1 = project
3029 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3030 .await
3031 .unwrap();
3032 let buffer2 = project
3033 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3034 .await
3035 .unwrap();
3036
3037 buffer1.read_with(cx, |buffer, _| {
3038 assert_eq!(buffer.text(), "a\nb\nc\n");
3039 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3040 });
3041 buffer2.read_with(cx, |buffer, _| {
3042 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3043 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3044 });
3045
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3048 fs.save(
3049 "/dir/file1".as_ref(),
3050 &"aaa\nb\nc\n".into(),
3051 LineEnding::Windows,
3052 )
3053 .await
3054 .unwrap();
3055 cx.foreground().run_until_parked();
3056 buffer1.read_with(cx, |buffer, _| {
3057 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3058 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3059 });
3060
    // Save a file with Windows line endings. The file is written correctly.
3062 buffer2.update(cx, |buffer, cx| {
3063 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3064 });
3065 project
3066 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3067 .await
3068 .unwrap();
3069 assert_eq!(
3070 fs.load("/dir/file2".as_ref()).await.unwrap(),
3071 "one\r\ntwo\r\nthree\r\nfour\r\n",
3072 );
3073}
3074
3075#[gpui::test]
3076async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3077 init_test(cx);
3078
3079 let fs = FakeFs::new(cx.background());
3080 fs.insert_tree(
3081 "/the-dir",
3082 json!({
3083 "a.rs": "
3084 fn foo(mut v: Vec<usize>) {
3085 for x in &v {
3086 v.push(1);
3087 }
3088 }
3089 "
3090 .unindent(),
3091 }),
3092 )
3093 .await;
3094
3095 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3096 let buffer = project
3097 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3098 .await
3099 .unwrap();
3100
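    // Publish a group of diagnostics in which the hints point back to their
    // primary diagnostics via `relatedInformation`, so they share a group id.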
3101 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3102 let message = lsp::PublishDiagnosticsParams {
3103 uri: buffer_uri.clone(),
3104 diagnostics: vec![
3105 lsp::Diagnostic {
3106 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3107 severity: Some(DiagnosticSeverity::WARNING),
3108 message: "error 1".to_string(),
3109 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3110 location: lsp::Location {
3111 uri: buffer_uri.clone(),
3112 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3113 },
3114 message: "error 1 hint 1".to_string(),
3115 }]),
3116 ..Default::default()
3117 },
3118 lsp::Diagnostic {
3119 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3120 severity: Some(DiagnosticSeverity::HINT),
3121 message: "error 1 hint 1".to_string(),
3122 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3123 location: lsp::Location {
3124 uri: buffer_uri.clone(),
3125 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3126 },
3127 message: "original diagnostic".to_string(),
3128 }]),
3129 ..Default::default()
3130 },
3131 lsp::Diagnostic {
3132 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3133 severity: Some(DiagnosticSeverity::ERROR),
3134 message: "error 2".to_string(),
3135 related_information: Some(vec![
3136 lsp::DiagnosticRelatedInformation {
3137 location: lsp::Location {
3138 uri: buffer_uri.clone(),
3139 range: lsp::Range::new(
3140 lsp::Position::new(1, 13),
3141 lsp::Position::new(1, 15),
3142 ),
3143 },
3144 message: "error 2 hint 1".to_string(),
3145 },
3146 lsp::DiagnosticRelatedInformation {
3147 location: lsp::Location {
3148 uri: buffer_uri.clone(),
3149 range: lsp::Range::new(
3150 lsp::Position::new(1, 13),
3151 lsp::Position::new(1, 15),
3152 ),
3153 },
3154 message: "error 2 hint 2".to_string(),
3155 },
3156 ]),
3157 ..Default::default()
3158 },
3159 lsp::Diagnostic {
3160 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3161 severity: Some(DiagnosticSeverity::HINT),
3162 message: "error 2 hint 1".to_string(),
3163 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3164 location: lsp::Location {
3165 uri: buffer_uri.clone(),
3166 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3167 },
3168 message: "original diagnostic".to_string(),
3169 }]),
3170 ..Default::default()
3171 },
3172 lsp::Diagnostic {
3173 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3174 severity: Some(DiagnosticSeverity::HINT),
3175 message: "error 2 hint 2".to_string(),
3176 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3177 location: lsp::Location {
3178 uri: buffer_uri,
3179 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3180 },
3181 message: "original diagnostic".to_string(),
3182 }]),
3183 ..Default::default()
3184 },
3185 ],
3186 version: None,
3187 };
3188
3189 project
3190 .update(cx, |p, cx| {
3191 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3192 })
3193 .unwrap();
3194 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3195
3196 assert_eq!(
3197 buffer
3198 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3199 .collect::<Vec<_>>(),
3200 &[
3201 DiagnosticEntry {
3202 range: Point::new(1, 8)..Point::new(1, 9),
3203 diagnostic: Diagnostic {
3204 severity: DiagnosticSeverity::WARNING,
3205 message: "error 1".to_string(),
3206 group_id: 1,
3207 is_primary: true,
3208 ..Default::default()
3209 }
3210 },
3211 DiagnosticEntry {
3212 range: Point::new(1, 8)..Point::new(1, 9),
3213 diagnostic: Diagnostic {
3214 severity: DiagnosticSeverity::HINT,
3215 message: "error 1 hint 1".to_string(),
3216 group_id: 1,
3217 is_primary: false,
3218 ..Default::default()
3219 }
3220 },
3221 DiagnosticEntry {
3222 range: Point::new(1, 13)..Point::new(1, 15),
3223 diagnostic: Diagnostic {
3224 severity: DiagnosticSeverity::HINT,
3225 message: "error 2 hint 1".to_string(),
3226 group_id: 0,
3227 is_primary: false,
3228 ..Default::default()
3229 }
3230 },
3231 DiagnosticEntry {
3232 range: Point::new(1, 13)..Point::new(1, 15),
3233 diagnostic: Diagnostic {
3234 severity: DiagnosticSeverity::HINT,
3235 message: "error 2 hint 2".to_string(),
3236 group_id: 0,
3237 is_primary: false,
3238 ..Default::default()
3239 }
3240 },
3241 DiagnosticEntry {
3242 range: Point::new(2, 8)..Point::new(2, 17),
3243 diagnostic: Diagnostic {
3244 severity: DiagnosticSeverity::ERROR,
3245 message: "error 2".to_string(),
3246 group_id: 0,
3247 is_primary: true,
3248 ..Default::default()
3249 }
3250 }
3251 ]
3252 );
3253
3254 assert_eq!(
3255 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3256 &[
3257 DiagnosticEntry {
3258 range: Point::new(1, 13)..Point::new(1, 15),
3259 diagnostic: Diagnostic {
3260 severity: DiagnosticSeverity::HINT,
3261 message: "error 2 hint 1".to_string(),
3262 group_id: 0,
3263 is_primary: false,
3264 ..Default::default()
3265 }
3266 },
3267 DiagnosticEntry {
3268 range: Point::new(1, 13)..Point::new(1, 15),
3269 diagnostic: Diagnostic {
3270 severity: DiagnosticSeverity::HINT,
3271 message: "error 2 hint 2".to_string(),
3272 group_id: 0,
3273 is_primary: false,
3274 ..Default::default()
3275 }
3276 },
3277 DiagnosticEntry {
3278 range: Point::new(2, 8)..Point::new(2, 17),
3279 diagnostic: Diagnostic {
3280 severity: DiagnosticSeverity::ERROR,
3281 message: "error 2".to_string(),
3282 group_id: 0,
3283 is_primary: true,
3284 ..Default::default()
3285 }
3286 }
3287 ]
3288 );
3289
3290 assert_eq!(
3291 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3292 &[
3293 DiagnosticEntry {
3294 range: Point::new(1, 8)..Point::new(1, 9),
3295 diagnostic: Diagnostic {
3296 severity: DiagnosticSeverity::WARNING,
3297 message: "error 1".to_string(),
3298 group_id: 1,
3299 is_primary: true,
3300 ..Default::default()
3301 }
3302 },
3303 DiagnosticEntry {
3304 range: Point::new(1, 8)..Point::new(1, 9),
3305 diagnostic: Diagnostic {
3306 severity: DiagnosticSeverity::HINT,
3307 message: "error 1 hint 1".to_string(),
3308 group_id: 1,
3309 is_primary: false,
3310 ..Default::default()
3311 }
3312 },
3313 ]
3314 );
3315}
3316
3317#[gpui::test]
3318async fn test_rename(cx: &mut gpui::TestAppContext) {
3319 init_test(cx);
3320
3321 let mut language = Language::new(
3322 LanguageConfig {
3323 name: "Rust".into(),
3324 path_suffixes: vec!["rs".to_string()],
3325 ..Default::default()
3326 },
3327 Some(tree_sitter_rust::language()),
3328 );
3329 let mut fake_servers = language
3330 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3331 capabilities: lsp::ServerCapabilities {
3332 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3333 prepare_provider: Some(true),
3334 work_done_progress_options: Default::default(),
3335 })),
3336 ..Default::default()
3337 },
3338 ..Default::default()
3339 }))
3340 .await;
3341
3342 let fs = FakeFs::new(cx.background());
3343 fs.insert_tree(
3344 "/dir",
3345 json!({
3346 "one.rs": "const ONE: usize = 1;",
3347 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3348 }),
3349 )
3350 .await;
3351
3352 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3353 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3354 let buffer = project
3355 .update(cx, |project, cx| {
3356 project.open_local_buffer("/dir/one.rs", cx)
3357 })
3358 .await
3359 .unwrap();
3360
3361 let fake_server = fake_servers.next().await.unwrap();
3362
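    // Prepare a rename at the position of `ONE` in one.rs. The language server
    // responds with the range of the symbol to be renamed.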
3363 let response = project.update(cx, |project, cx| {
3364 project.prepare_rename(buffer.clone(), 7, cx)
3365 });
3366 fake_server
3367 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3368 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3369 assert_eq!(params.position, lsp::Position::new(0, 7));
3370 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3371 lsp::Position::new(0, 6),
3372 lsp::Position::new(0, 9),
3373 ))))
3374 })
3375 .next()
3376 .await
3377 .unwrap();
3378 let range = response.await.unwrap().unwrap();
3379 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3380 assert_eq!(range, 6..9);
3381
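    // Perform the rename. The server's workspace edit touches both one.rs and
    // two.rs, so the resulting transaction contains two buffers.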
3382 let response = project.update(cx, |project, cx| {
3383 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3384 });
3385 fake_server
3386 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3387 assert_eq!(
3388 params.text_document_position.text_document.uri.as_str(),
3389 "file:///dir/one.rs"
3390 );
3391 assert_eq!(
3392 params.text_document_position.position,
3393 lsp::Position::new(0, 7)
3394 );
3395 assert_eq!(params.new_name, "THREE");
3396 Ok(Some(lsp::WorkspaceEdit {
3397 changes: Some(
3398 [
3399 (
3400 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3401 vec![lsp::TextEdit::new(
3402 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3403 "THREE".to_string(),
3404 )],
3405 ),
3406 (
3407 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3408 vec![
3409 lsp::TextEdit::new(
3410 lsp::Range::new(
3411 lsp::Position::new(0, 24),
3412 lsp::Position::new(0, 27),
3413 ),
3414 "THREE".to_string(),
3415 ),
3416 lsp::TextEdit::new(
3417 lsp::Range::new(
3418 lsp::Position::new(0, 35),
3419 lsp::Position::new(0, 38),
3420 ),
3421 "THREE".to_string(),
3422 ),
3423 ],
3424 ),
3425 ]
3426 .into_iter()
3427 .collect(),
3428 ),
3429 ..Default::default()
3430 }))
3431 })
3432 .next()
3433 .await
3434 .unwrap();
3435 let mut transaction = response.await.unwrap().0;
3436 assert_eq!(transaction.len(), 2);
3437 assert_eq!(
3438 transaction
3439 .remove_entry(&buffer)
3440 .unwrap()
3441 .0
3442 .read_with(cx, |buffer, _| buffer.text()),
3443 "const THREE: usize = 1;"
3444 );
3445 assert_eq!(
3446 transaction
3447 .into_keys()
3448 .next()
3449 .unwrap()
3450 .read_with(cx, |buffer, _| buffer.text()),
3451 "const TWO: usize = one::THREE + one::THREE;"
3452 );
3453}
3454
3455#[gpui::test]
3456async fn test_search(cx: &mut gpui::TestAppContext) {
3457 init_test(cx);
3458
3459 let fs = FakeFs::new(cx.background());
3460 fs.insert_tree(
3461 "/dir",
3462 json!({
3463 "one.rs": "const ONE: usize = 1;",
3464 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3465 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3466 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3467 }),
3468 )
3469 .await;
3470 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3471 assert_eq!(
3472 search(
3473 &project,
3474 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3475 cx
3476 )
3477 .await
3478 .unwrap(),
3479 HashMap::from_iter([
3480 ("two.rs".to_string(), vec![6..9]),
3481 ("three.rs".to_string(), vec![37..40])
3482 ])
3483 );
3484
3485 let buffer_4 = project
3486 .update(cx, |project, cx| {
3487 project.open_local_buffer("/dir/four.rs", cx)
3488 })
3489 .await
3490 .unwrap();
3491 buffer_4.update(cx, |buffer, cx| {
3492 let text = "two::TWO";
3493 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3494 });
3495
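    // Unsaved edits in open buffers are reflected in the search results.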
3496 assert_eq!(
3497 search(
3498 &project,
3499 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3500 cx
3501 )
3502 .await
3503 .unwrap(),
3504 HashMap::from_iter([
3505 ("two.rs".to_string(), vec![6..9]),
3506 ("three.rs".to_string(), vec![37..40]),
3507 ("four.rs".to_string(), vec![25..28, 36..39])
3508 ])
3509 );
3510}
3511
3512#[gpui::test]
3513async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3514 init_test(cx);
3515
3516 let search_query = "file";
3517
3518 let fs = FakeFs::new(cx.background());
3519 fs.insert_tree(
3520 "/dir",
3521 json!({
3522 "one.rs": r#"// Rust file one"#,
3523 "one.ts": r#"// TypeScript file one"#,
3524 "two.rs": r#"// Rust file two"#,
3525 "two.ts": r#"// TypeScript file two"#,
3526 }),
3527 )
3528 .await;
3529 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3530
3531 assert!(
3532 search(
3533 &project,
3534 SearchQuery::text(
3535 search_query,
3536 false,
3537 true,
3538 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3539 Vec::new()
3540 ),
3541 cx
3542 )
3543 .await
3544 .unwrap()
3545 .is_empty(),
3546 "If no inclusions match, no files should be returned"
3547 );
3548
3549 assert_eq!(
3550 search(
3551 &project,
3552 SearchQuery::text(
3553 search_query,
3554 false,
3555 true,
3556 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3557 Vec::new()
3558 ),
3559 cx
3560 )
3561 .await
3562 .unwrap(),
3563 HashMap::from_iter([
3564 ("one.rs".to_string(), vec![8..12]),
3565 ("two.rs".to_string(), vec![8..12]),
3566 ]),
3567 "Rust only search should give only Rust files"
3568 );
3569
3570 assert_eq!(
3571 search(
3572 &project,
3573 SearchQuery::text(
3574 search_query,
3575 false,
3576 true,
3577 vec![
3578 Glob::new("*.ts").unwrap().compile_matcher(),
3579 Glob::new("*.odd").unwrap().compile_matcher(),
3580 ],
3581 Vec::new()
3582 ),
3583 cx
3584 )
3585 .await
3586 .unwrap(),
3587 HashMap::from_iter([
3588 ("one.ts".to_string(), vec![14..18]),
3589 ("two.ts".to_string(), vec![14..18]),
3590 ]),
3591 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3592 );
3593
3594 assert_eq!(
3595 search(
3596 &project,
3597 SearchQuery::text(
3598 search_query,
3599 false,
3600 true,
3601 vec![
3602 Glob::new("*.rs").unwrap().compile_matcher(),
3603 Glob::new("*.ts").unwrap().compile_matcher(),
3604 Glob::new("*.odd").unwrap().compile_matcher(),
3605 ],
3606 Vec::new()
3607 ),
3608 cx
3609 )
3610 .await
3611 .unwrap(),
3612 HashMap::from_iter([
3613 ("one.rs".to_string(), vec![8..12]),
3614 ("one.ts".to_string(), vec![14..18]),
3615 ("two.rs".to_string(), vec![8..12]),
3616 ("two.ts".to_string(), vec![14..18]),
3617 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3619 );
3620}
3621
3622#[gpui::test]
3623async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3624 init_test(cx);
3625
3626 let search_query = "file";
3627
3628 let fs = FakeFs::new(cx.background());
3629 fs.insert_tree(
3630 "/dir",
3631 json!({
3632 "one.rs": r#"// Rust file one"#,
3633 "one.ts": r#"// TypeScript file one"#,
3634 "two.rs": r#"// Rust file two"#,
3635 "two.ts": r#"// TypeScript file two"#,
3636 }),
3637 )
3638 .await;
3639 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3640
3641 assert_eq!(
3642 search(
3643 &project,
3644 SearchQuery::text(
3645 search_query,
3646 false,
3647 true,
3648 Vec::new(),
3649 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3650 ),
3651 cx
3652 )
3653 .await
3654 .unwrap(),
3655 HashMap::from_iter([
3656 ("one.rs".to_string(), vec![8..12]),
3657 ("one.ts".to_string(), vec![14..18]),
3658 ("two.rs".to_string(), vec![8..12]),
3659 ("two.ts".to_string(), vec![14..18]),
3660 ]),
3661 "If no exclusions match, all files should be returned"
3662 );
3663
3664 assert_eq!(
3665 search(
3666 &project,
3667 SearchQuery::text(
3668 search_query,
3669 false,
3670 true,
3671 Vec::new(),
3672 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3673 ),
3674 cx
3675 )
3676 .await
3677 .unwrap(),
3678 HashMap::from_iter([
3679 ("one.ts".to_string(), vec![14..18]),
3680 ("two.ts".to_string(), vec![14..18]),
3681 ]),
3682 "Rust exclusion search should give only TypeScript files"
3683 );
3684
3685 assert_eq!(
3686 search(
3687 &project,
3688 SearchQuery::text(
3689 search_query,
3690 false,
3691 true,
3692 Vec::new(),
3693 vec![
3694 Glob::new("*.ts").unwrap().compile_matcher(),
3695 Glob::new("*.odd").unwrap().compile_matcher(),
3696 ],
3697 ),
3698 cx
3699 )
3700 .await
3701 .unwrap(),
3702 HashMap::from_iter([
3703 ("one.rs".to_string(), vec![8..12]),
3704 ("two.rs".to_string(), vec![8..12]),
3705 ]),
3706 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3707 );
3708
3709 assert!(
3710 search(
3711 &project,
3712 SearchQuery::text(
3713 search_query,
3714 false,
3715 true,
3716 Vec::new(),
3717 vec![
3718 Glob::new("*.rs").unwrap().compile_matcher(),
3719 Glob::new("*.ts").unwrap().compile_matcher(),
3720 Glob::new("*.odd").unwrap().compile_matcher(),
3721 ],
3722 ),
3723 cx
3724 )
3725 .await
3726 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3728 );
3729}
3730
3731#[gpui::test]
3732async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3733 init_test(cx);
3734
3735 let search_query = "file";
3736
3737 let fs = FakeFs::new(cx.background());
3738 fs.insert_tree(
3739 "/dir",
3740 json!({
3741 "one.rs": r#"// Rust file one"#,
3742 "one.ts": r#"// TypeScript file one"#,
3743 "two.rs": r#"// Rust file two"#,
3744 "two.ts": r#"// TypeScript file two"#,
3745 }),
3746 )
3747 .await;
3748 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3749
3750 assert!(
3751 search(
3752 &project,
3753 SearchQuery::text(
3754 search_query,
3755 false,
3756 true,
3757 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3758 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3759 ),
3760 cx
3761 )
3762 .await
3763 .unwrap()
3764 .is_empty(),
        "If neither the inclusions nor the exclusions match any files, no files should be returned"
3766 );
3767
3768 assert!(
3769 search(
3770 &project,
3771 SearchQuery::text(
3772 search_query,
3773 false,
3774 true,
3775 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3776 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3777 ),
3778 cx
3779 )
3780 .await
3781 .unwrap()
3782 .is_empty(),
        "If the same files match both the TypeScript inclusions and exclusions, the exclusions should win and no files should be returned."
3784 );
3785
3786 assert!(
3787 search(
3788 &project,
3789 SearchQuery::text(
3790 search_query,
3791 false,
3792 true,
3793 vec![
3794 Glob::new("*.ts").unwrap().compile_matcher(),
3795 Glob::new("*.odd").unwrap().compile_matcher()
3796 ],
3797 vec![
3798 Glob::new("*.ts").unwrap().compile_matcher(),
3799 Glob::new("*.odd").unwrap().compile_matcher()
3800 ],
3801 ),
3802 cx
3803 )
3804 .await
3805 .unwrap()
3806 .is_empty(),
        "Adding non-matching inclusions and exclusions should not change that."
3808 );
3809
3810 assert_eq!(
3811 search(
3812 &project,
3813 SearchQuery::text(
3814 search_query,
3815 false,
3816 true,
3817 vec![
3818 Glob::new("*.ts").unwrap().compile_matcher(),
3819 Glob::new("*.odd").unwrap().compile_matcher()
3820 ],
3821 vec![
3822 Glob::new("*.rs").unwrap().compile_matcher(),
3823 Glob::new("*.odd").unwrap().compile_matcher()
3824 ],
3825 ),
3826 cx
3827 )
3828 .await
3829 .unwrap(),
3830 HashMap::from_iter([
3831 ("one.ts".to_string(), vec![14..18]),
3832 ("two.ts".to_string(), vec![14..18]),
3833 ]),
3834 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3835 );
3836}
3837
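/// Runs `query` against `project` and returns the results as a map from each
/// matching buffer's path to the matched offset ranges within that buffer.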
3838async fn search(
3839 project: &ModelHandle<Project>,
3840 query: SearchQuery,
3841 cx: &mut gpui::TestAppContext,
3842) -> Result<HashMap<String, Vec<Range<usize>>>> {
3843 let results = project
3844 .update(cx, |project, cx| project.search(query, cx))
3845 .await?;
3846
3847 Ok(results
3848 .into_iter()
3849 .map(|(buffer, ranges)| {
3850 buffer.read_with(cx, |buffer, _| {
3851 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3852 let ranges = ranges
3853 .into_iter()
3854 .map(|range| range.to_offset(buffer))
3855 .collect::<Vec<_>>();
3856 (path, ranges)
3857 })
3858 })
3859 .collect())
3860}
3861
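/// Shared test setup: forbids parking on the foreground executor and installs
/// the default settings store, language support, and project settings.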
3862fn init_test(cx: &mut gpui::TestAppContext) {
3863 cx.foreground().forbid_parking();
3864
3865 cx.update(|cx| {
3866 cx.set_global(SettingsStore::test(cx));
3867 language::init(cx);
3868 Project::init_settings(cx);
3869 });
3870}