1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use globset::Glob;
5use gpui::{executor::Deterministic, test::subscribe, AppContext};
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 init_test(cx);
30 cx.foreground().allow_parking();
31
32 let dir = temp_tree(json!({
33 "root": {
34 "apple": "",
35 "banana": {
36 "carrot": {
37 "date": "",
38 "endive": "",
39 }
40 },
41 "fennel": {
42 "grape": "",
43 }
44 }
45 }));
46
47 let root_link_path = dir.path().join("root_link");
48 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
49 unix::fs::symlink(
50 &dir.path().join("root/fennel"),
51 &dir.path().join("root/finnochio"),
52 )
53 .unwrap();
54
55 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
56 project.read_with(cx, |project, cx| {
57 let tree = project.worktrees(cx).next().unwrap().read(cx);
58 assert_eq!(tree.file_count(), 5);
59 assert_eq!(
60 tree.inode_for_path("fennel/grape"),
61 tree.inode_for_path("finnochio/grape")
62 );
63 });
64}
65
66#[gpui::test]
67async fn test_managing_project_specific_settings(
68 deterministic: Arc<Deterministic>,
69 cx: &mut gpui::TestAppContext,
70) {
71 init_test(cx);
72
73 let fs = FakeFs::new(cx.background());
74 fs.insert_tree(
75 "/the-root",
76 json!({
77 ".zed": {
78 "settings.json": r#"{ "tab_size": 8 }"#
79 },
80 "a": {
81 "a.rs": "fn a() {\n A\n}"
82 },
83 "b": {
84 ".zed": {
85 "settings.json": r#"{ "tab_size": 2 }"#
86 },
87 "b.rs": "fn b() {\n B\n}"
88 }
89 }),
90 )
91 .await;
92
93 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
94 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
95
96 deterministic.run_until_parked();
97 cx.read(|cx| {
98 let tree = worktree.read(cx);
99
100 let settings_a = language_settings(
101 None,
102 Some(
103 &(File::for_entry(
104 tree.entry_for_path("a/a.rs").unwrap().clone(),
105 worktree.clone(),
106 ) as _),
107 ),
108 cx,
109 );
110 let settings_b = language_settings(
111 None,
112 Some(
113 &(File::for_entry(
114 tree.entry_for_path("b/b.rs").unwrap().clone(),
115 worktree.clone(),
116 ) as _),
117 ),
118 cx,
119 );
120
121 assert_eq!(settings_a.tab_size.get(), 8);
122 assert_eq!(settings_b.tab_size.get(), 2);
123 });
124}
125
126#[gpui::test]
127async fn test_managing_language_servers(
128 deterministic: Arc<Deterministic>,
129 cx: &mut gpui::TestAppContext,
130) {
131 init_test(cx);
132
133 let mut rust_language = Language::new(
134 LanguageConfig {
135 name: "Rust".into(),
136 path_suffixes: vec!["rs".to_string()],
137 ..Default::default()
138 },
139 Some(tree_sitter_rust::language()),
140 );
141 let mut json_language = Language::new(
142 LanguageConfig {
143 name: "JSON".into(),
144 path_suffixes: vec!["json".to_string()],
145 ..Default::default()
146 },
147 None,
148 );
149 let mut fake_rust_servers = rust_language
150 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
151 name: "the-rust-language-server",
152 capabilities: lsp::ServerCapabilities {
153 completion_provider: Some(lsp::CompletionOptions {
154 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
155 ..Default::default()
156 }),
157 ..Default::default()
158 },
159 ..Default::default()
160 }))
161 .await;
162 let mut fake_json_servers = json_language
163 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
164 name: "the-json-language-server",
165 capabilities: lsp::ServerCapabilities {
166 completion_provider: Some(lsp::CompletionOptions {
167 trigger_characters: Some(vec![":".to_string()]),
168 ..Default::default()
169 }),
170 ..Default::default()
171 },
172 ..Default::default()
173 }))
174 .await;
175
176 let fs = FakeFs::new(cx.background());
177 fs.insert_tree(
178 "/the-root",
179 json!({
180 "test.rs": "const A: i32 = 1;",
181 "test2.rs": "",
182 "Cargo.toml": "a = 1",
183 "package.json": "{\"a\": 1}",
184 }),
185 )
186 .await;
187
188 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
189
190 // Open a buffer without an associated language server.
191 let toml_buffer = project
192 .update(cx, |project, cx| {
193 project.open_local_buffer("/the-root/Cargo.toml", cx)
194 })
195 .await
196 .unwrap();
197
198 // Open a buffer with an associated language server before the language for it has been loaded.
199 let rust_buffer = project
200 .update(cx, |project, cx| {
201 project.open_local_buffer("/the-root/test.rs", cx)
202 })
203 .await
204 .unwrap();
205 rust_buffer.read_with(cx, |buffer, _| {
206 assert_eq!(buffer.language().map(|l| l.name()), None);
207 });
208
209 // Now we add the languages to the project, and ensure they get assigned to all
210 // the relevant open buffers.
211 project.update(cx, |project, _| {
212 project.languages.add(Arc::new(json_language));
213 project.languages.add(Arc::new(rust_language));
214 });
215 deterministic.run_until_parked();
216 rust_buffer.read_with(cx, |buffer, _| {
217 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
218 });
219
    // A Rust language server is started, and it is notified about the newly opened Rust file.
221 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
222 assert_eq!(
223 fake_rust_server
224 .receive_notification::<lsp::notification::DidOpenTextDocument>()
225 .await
226 .text_document,
227 lsp::TextDocumentItem {
228 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
229 version: 0,
230 text: "const A: i32 = 1;".to_string(),
231 language_id: Default::default()
232 }
233 );
234
235 // The buffer is configured based on the language server's capabilities.
236 rust_buffer.read_with(cx, |buffer, _| {
237 assert_eq!(
238 buffer.completion_triggers(),
239 &[".".to_string(), "::".to_string()]
240 );
241 });
242 toml_buffer.read_with(cx, |buffer, _| {
243 assert!(buffer.completion_triggers().is_empty());
244 });
245
246 // Edit a buffer. The changes are reported to the language server.
247 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
248 assert_eq!(
249 fake_rust_server
250 .receive_notification::<lsp::notification::DidChangeTextDocument>()
251 .await
252 .text_document,
253 lsp::VersionedTextDocumentIdentifier::new(
254 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
255 1
256 )
257 );
258
259 // Open a third buffer with a different associated language server.
260 let json_buffer = project
261 .update(cx, |project, cx| {
262 project.open_local_buffer("/the-root/package.json", cx)
263 })
264 .await
265 .unwrap();
266
    // A JSON language server is started and is notified only about the JSON buffer.
268 let mut fake_json_server = fake_json_servers.next().await.unwrap();
269 assert_eq!(
270 fake_json_server
271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
272 .await
273 .text_document,
274 lsp::TextDocumentItem {
275 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
276 version: 0,
277 text: "{\"a\": 1}".to_string(),
278 language_id: Default::default()
279 }
280 );
281
282 // This buffer is configured based on the second language server's
283 // capabilities.
284 json_buffer.read_with(cx, |buffer, _| {
285 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
286 });
287
288 // When opening another buffer whose language server is already running,
289 // it is also configured based on the existing language server's capabilities.
290 let rust_buffer2 = project
291 .update(cx, |project, cx| {
292 project.open_local_buffer("/the-root/test2.rs", cx)
293 })
294 .await
295 .unwrap();
296 rust_buffer2.read_with(cx, |buffer, _| {
297 assert_eq!(
298 buffer.completion_triggers(),
299 &[".".to_string(), "::".to_string()]
300 );
301 });
302
303 // Changes are reported only to servers matching the buffer's language.
304 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
305 rust_buffer2.update(cx, |buffer, cx| {
306 buffer.edit([(0..0, "let x = 1;")], None, cx)
307 });
308 assert_eq!(
309 fake_rust_server
310 .receive_notification::<lsp::notification::DidChangeTextDocument>()
311 .await
312 .text_document,
313 lsp::VersionedTextDocumentIdentifier::new(
314 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
315 1
316 )
317 );
318
319 // Save notifications are reported to all servers.
320 project
321 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
322 .await
323 .unwrap();
324 assert_eq!(
325 fake_rust_server
326 .receive_notification::<lsp::notification::DidSaveTextDocument>()
327 .await
328 .text_document,
329 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
330 );
331 assert_eq!(
332 fake_json_server
333 .receive_notification::<lsp::notification::DidSaveTextDocument>()
334 .await
335 .text_document,
336 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
337 );
338
339 // Renames are reported only to servers matching the buffer's language.
340 fs.rename(
341 Path::new("/the-root/test2.rs"),
342 Path::new("/the-root/test3.rs"),
343 Default::default(),
344 )
345 .await
346 .unwrap();
347 assert_eq!(
348 fake_rust_server
349 .receive_notification::<lsp::notification::DidCloseTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
353 );
354 assert_eq!(
355 fake_rust_server
356 .receive_notification::<lsp::notification::DidOpenTextDocument>()
357 .await
358 .text_document,
359 lsp::TextDocumentItem {
360 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
361 version: 0,
362 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
363 language_id: Default::default()
364 },
365 );
366
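    // Attach a diagnostic to the renamed buffer so we can verify below that it is
    // cleared when the buffer's language changes.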
367 rust_buffer2.update(cx, |buffer, cx| {
368 buffer.update_diagnostics(
369 LanguageServerId(0),
370 DiagnosticSet::from_sorted_entries(
371 vec![DiagnosticEntry {
372 diagnostic: Default::default(),
373 range: Anchor::MIN..Anchor::MAX,
374 }],
375 &buffer.snapshot(),
376 ),
377 cx,
378 );
379 assert_eq!(
380 buffer
381 .snapshot()
382 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
383 .count(),
384 1
385 );
386 });
387
    // When the rename changes the file's extension, the buffer is closed on the old
    // language server and opened on the new one.
390 fs.rename(
391 Path::new("/the-root/test3.rs"),
392 Path::new("/the-root/test3.json"),
393 Default::default(),
394 )
395 .await
396 .unwrap();
397 assert_eq!(
398 fake_rust_server
399 .receive_notification::<lsp::notification::DidCloseTextDocument>()
400 .await
401 .text_document,
402 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
403 );
404 assert_eq!(
405 fake_json_server
406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
407 .await
408 .text_document,
409 lsp::TextDocumentItem {
410 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
411 version: 0,
412 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
413 language_id: Default::default()
414 },
415 );
416
    // The buffer's diagnostics are cleared, since its language has changed.
418 rust_buffer2.read_with(cx, |buffer, _| {
419 assert_eq!(
420 buffer
421 .snapshot()
422 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
423 .count(),
424 0
425 );
426 });
427
    // The renamed buffer's document version resets after it is reassigned to a new language server.
429 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
430 assert_eq!(
431 fake_json_server
432 .receive_notification::<lsp::notification::DidChangeTextDocument>()
433 .await
434 .text_document,
435 lsp::VersionedTextDocumentIdentifier::new(
436 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
437 1
438 )
439 );
440
    // Restart both language servers.
442 project.update(cx, |project, cx| {
443 project.restart_language_servers_for_buffers(
444 vec![rust_buffer.clone(), json_buffer.clone()],
445 cx,
446 );
447 });
448
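    // Both the Rust and JSON servers should receive a shutdown request before their replacements start.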
449 let mut rust_shutdown_requests = fake_rust_server
450 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
451 let mut json_shutdown_requests = fake_json_server
452 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
453 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
454
455 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
456 let mut fake_json_server = fake_json_servers.next().await.unwrap();
457
    // Ensure the Rust document is reopened in the new Rust language server.
459 assert_eq!(
460 fake_rust_server
461 .receive_notification::<lsp::notification::DidOpenTextDocument>()
462 .await
463 .text_document,
464 lsp::TextDocumentItem {
465 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
466 version: 0,
467 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
468 language_id: Default::default()
469 }
470 );
471
    // Ensure the JSON documents are reopened in the new JSON language server.
473 assert_set_eq!(
474 [
475 fake_json_server
476 .receive_notification::<lsp::notification::DidOpenTextDocument>()
477 .await
478 .text_document,
479 fake_json_server
480 .receive_notification::<lsp::notification::DidOpenTextDocument>()
481 .await
482 .text_document,
483 ],
484 [
485 lsp::TextDocumentItem {
486 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
487 version: 0,
488 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
489 language_id: Default::default()
490 },
491 lsp::TextDocumentItem {
492 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
493 version: 0,
494 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
495 language_id: Default::default()
496 }
497 ]
498 );
499
500 // Close notifications are reported only to servers matching the buffer's language.
501 cx.update(|_| drop(json_buffer));
502 let close_message = lsp::DidCloseTextDocumentParams {
503 text_document: lsp::TextDocumentIdentifier::new(
504 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
505 ),
506 };
507 assert_eq!(
508 fake_json_server
509 .receive_notification::<lsp::notification::DidCloseTextDocument>()
510 .await,
511 close_message,
512 );
513}
514
515#[gpui::test]
516async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
517 init_test(cx);
518
519 let mut language = Language::new(
520 LanguageConfig {
521 name: "Rust".into(),
522 path_suffixes: vec!["rs".to_string()],
523 ..Default::default()
524 },
525 Some(tree_sitter_rust::language()),
526 );
527 let mut fake_servers = language
528 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
529 name: "the-language-server",
530 ..Default::default()
531 }))
532 .await;
533
534 let fs = FakeFs::new(cx.background());
535 fs.insert_tree(
536 "/the-root",
537 json!({
538 "a.rs": "",
539 "b.rs": "",
540 }),
541 )
542 .await;
543
544 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
545 project.update(cx, |project, _| {
546 project.languages.add(Arc::new(language));
547 });
548 cx.foreground().run_until_parked();
549
550 // Start the language server by opening a buffer with a compatible file extension.
551 let _buffer = project
552 .update(cx, |project, cx| {
553 project.open_local_buffer("/the-root/a.rs", cx)
554 })
555 .await
556 .unwrap();
557
558 // Keep track of the FS events reported to the language server.
559 let fake_server = fake_servers.next().await.unwrap();
560 let file_changes = Arc::new(Mutex::new(Vec::new()));
561 fake_server
562 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
563 registrations: vec![lsp::Registration {
564 id: Default::default(),
565 method: "workspace/didChangeWatchedFiles".to_string(),
566 register_options: serde_json::to_value(
567 lsp::DidChangeWatchedFilesRegistrationOptions {
568 watchers: vec![lsp::FileSystemWatcher {
569 glob_pattern: lsp::GlobPattern::String(
570 "/the-root/*.{rs,c}".to_string(),
571 ),
572 kind: None,
573 }],
574 },
575 )
576 .ok(),
577 }],
578 })
579 .await
580 .unwrap();
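    // Collect the file events the server is notified about, keeping them sorted by URI
    // for deterministic assertions.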
581 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
582 let file_changes = file_changes.clone();
583 move |params, _| {
584 let mut file_changes = file_changes.lock();
585 file_changes.extend(params.changes);
586 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
587 }
588 });
589
590 cx.foreground().run_until_parked();
591 assert_eq!(file_changes.lock().len(), 0);
592
593 // Perform some file system mutations, two of which match the watched patterns,
594 // and one of which does not.
595 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
596 .await
597 .unwrap();
598 fs.create_file("/the-root/d.txt".as_ref(), Default::default())
599 .await
600 .unwrap();
601 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
602 .await
603 .unwrap();
604
605 // The language server receives events for the FS mutations that match its watch patterns.
606 cx.foreground().run_until_parked();
607 assert_eq!(
608 &*file_changes.lock(),
609 &[
610 lsp::FileEvent {
611 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
612 typ: lsp::FileChangeType::DELETED,
613 },
614 lsp::FileEvent {
615 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
616 typ: lsp::FileChangeType::CREATED,
617 },
618 ]
619 );
620}
621
622#[gpui::test]
623async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
624 init_test(cx);
625
626 let fs = FakeFs::new(cx.background());
627 fs.insert_tree(
628 "/dir",
629 json!({
630 "a.rs": "let a = 1;",
631 "b.rs": "let b = 2;"
632 }),
633 )
634 .await;
635
636 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
637
638 let buffer_a = project
639 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
640 .await
641 .unwrap();
642 let buffer_b = project
643 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
644 .await
645 .unwrap();
646
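    // Publish one diagnostic for each of the two single-file worktrees.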
647 project.update(cx, |project, cx| {
648 project
649 .update_diagnostics(
650 LanguageServerId(0),
651 lsp::PublishDiagnosticsParams {
652 uri: Url::from_file_path("/dir/a.rs").unwrap(),
653 version: None,
654 diagnostics: vec![lsp::Diagnostic {
655 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
656 severity: Some(lsp::DiagnosticSeverity::ERROR),
657 message: "error 1".to_string(),
658 ..Default::default()
659 }],
660 },
661 &[],
662 cx,
663 )
664 .unwrap();
665 project
666 .update_diagnostics(
667 LanguageServerId(0),
668 lsp::PublishDiagnosticsParams {
669 uri: Url::from_file_path("/dir/b.rs").unwrap(),
670 version: None,
671 diagnostics: vec![lsp::Diagnostic {
672 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
673 severity: Some(lsp::DiagnosticSeverity::WARNING),
674 message: "error 2".to_string(),
675 ..Default::default()
676 }],
677 },
678 &[],
679 cx,
680 )
681 .unwrap();
682 });
683
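    // Each buffer contains only the diagnostics published for its own file.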
684 buffer_a.read_with(cx, |buffer, _| {
685 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
686 assert_eq!(
687 chunks
688 .iter()
689 .map(|(s, d)| (s.as_str(), *d))
690 .collect::<Vec<_>>(),
691 &[
692 ("let ", None),
693 ("a", Some(DiagnosticSeverity::ERROR)),
694 (" = 1;", None),
695 ]
696 );
697 });
698 buffer_b.read_with(cx, |buffer, _| {
699 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
700 assert_eq!(
701 chunks
702 .iter()
703 .map(|(s, d)| (s.as_str(), *d))
704 .collect::<Vec<_>>(),
705 &[
706 ("let ", None),
707 ("b", Some(DiagnosticSeverity::WARNING)),
708 (" = 2;", None),
709 ]
710 );
711 });
712}
713
714#[gpui::test]
715async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
716 init_test(cx);
717
718 let fs = FakeFs::new(cx.background());
719 fs.insert_tree(
720 "/root",
721 json!({
722 "dir": {
723 "a.rs": "let a = 1;",
724 },
725 "other.rs": "let b = c;"
726 }),
727 )
728 .await;
729
730 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
731
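    // Add the file outside the project's root as a hidden (non-visible) worktree.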
732 let (worktree, _) = project
733 .update(cx, |project, cx| {
734 project.find_or_create_local_worktree("/root/other.rs", false, cx)
735 })
736 .await
737 .unwrap();
738 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
739
740 project.update(cx, |project, cx| {
741 project
742 .update_diagnostics(
743 LanguageServerId(0),
744 lsp::PublishDiagnosticsParams {
745 uri: Url::from_file_path("/root/other.rs").unwrap(),
746 version: None,
747 diagnostics: vec![lsp::Diagnostic {
748 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
749 severity: Some(lsp::DiagnosticSeverity::ERROR),
750 message: "unknown variable 'c'".to_string(),
751 ..Default::default()
752 }],
753 },
754 &[],
755 cx,
756 )
757 .unwrap();
758 });
759
760 let buffer = project
761 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
762 .await
763 .unwrap();
764 buffer.read_with(cx, |buffer, _| {
765 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
766 assert_eq!(
767 chunks
768 .iter()
769 .map(|(s, d)| (s.as_str(), *d))
770 .collect::<Vec<_>>(),
771 &[
772 ("let b = ", None),
773 ("c", Some(DiagnosticSeverity::ERROR)),
774 (";", None),
775 ]
776 );
777 });
778
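    // Diagnostics in hidden worktrees are excluded from the project's diagnostic summaries.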
779 project.read_with(cx, |project, cx| {
780 assert_eq!(project.diagnostic_summaries(cx).next(), None);
781 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
782 });
783}
784
785#[gpui::test]
786async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
787 init_test(cx);
788
789 let progress_token = "the-progress-token";
790 let mut language = Language::new(
791 LanguageConfig {
792 name: "Rust".into(),
793 path_suffixes: vec!["rs".to_string()],
794 ..Default::default()
795 },
796 Some(tree_sitter_rust::language()),
797 );
798 let mut fake_servers = language
799 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
800 disk_based_diagnostics_progress_token: Some(progress_token.into()),
801 disk_based_diagnostics_sources: vec!["disk".into()],
802 ..Default::default()
803 }))
804 .await;
805
806 let fs = FakeFs::new(cx.background());
807 fs.insert_tree(
808 "/dir",
809 json!({
810 "a.rs": "fn a() { A }",
811 "b.rs": "const y: i32 = 1",
812 }),
813 )
814 .await;
815
816 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
817 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
818 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
819
    // Cause the worktree to start the fake language server.
821 let _buffer = project
822 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
823 .await
824 .unwrap();
825
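    // Subscribe to project events so the order of diagnostics-related events can be asserted below.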
826 let mut events = subscribe(&project, cx);
827
828 let fake_server = fake_servers.next().await.unwrap();
829 assert_eq!(
830 events.next().await.unwrap(),
831 Event::LanguageServerAdded(LanguageServerId(0)),
832 );
833
834 fake_server
835 .start_progress(format!("{}/0", progress_token))
836 .await;
837 assert_eq!(
838 events.next().await.unwrap(),
839 Event::DiskBasedDiagnosticsStarted {
840 language_server_id: LanguageServerId(0),
841 }
842 );
843
844 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
845 uri: Url::from_file_path("/dir/a.rs").unwrap(),
846 version: None,
847 diagnostics: vec![lsp::Diagnostic {
848 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
849 severity: Some(lsp::DiagnosticSeverity::ERROR),
850 message: "undefined variable 'A'".to_string(),
851 ..Default::default()
852 }],
853 });
854 assert_eq!(
855 events.next().await.unwrap(),
856 Event::DiagnosticsUpdated {
857 language_server_id: LanguageServerId(0),
858 path: (worktree_id, Path::new("a.rs")).into()
859 }
860 );
861
862 fake_server.end_progress(format!("{}/0", progress_token));
863 assert_eq!(
864 events.next().await.unwrap(),
865 Event::DiskBasedDiagnosticsFinished {
866 language_server_id: LanguageServerId(0)
867 }
868 );
869
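    // Open the file that received diagnostics and verify that they appear in the buffer.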
870 let buffer = project
871 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
872 .await
873 .unwrap();
874
875 buffer.read_with(cx, |buffer, _| {
876 let snapshot = buffer.snapshot();
877 let diagnostics = snapshot
878 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
879 .collect::<Vec<_>>();
880 assert_eq!(
881 diagnostics,
882 &[DiagnosticEntry {
883 range: Point::new(0, 9)..Point::new(0, 10),
884 diagnostic: Diagnostic {
885 severity: lsp::DiagnosticSeverity::ERROR,
886 message: "undefined variable 'A'".to_string(),
887 group_id: 0,
888 is_primary: true,
889 ..Default::default()
890 }
891 }]
892 )
893 });
894
895 // Ensure publishing empty diagnostics twice only results in one update event.
896 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
897 uri: Url::from_file_path("/dir/a.rs").unwrap(),
898 version: None,
899 diagnostics: Default::default(),
900 });
901 assert_eq!(
902 events.next().await.unwrap(),
903 Event::DiagnosticsUpdated {
904 language_server_id: LanguageServerId(0),
905 path: (worktree_id, Path::new("a.rs")).into()
906 }
907 );
908
909 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
910 uri: Url::from_file_path("/dir/a.rs").unwrap(),
911 version: None,
912 diagnostics: Default::default(),
913 });
914 cx.foreground().run_until_parked();
915 assert_eq!(futures::poll!(events.next()), Poll::Pending);
916}
917
918#[gpui::test]
919async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
920 init_test(cx);
921
922 let progress_token = "the-progress-token";
923 let mut language = Language::new(
924 LanguageConfig {
925 path_suffixes: vec!["rs".to_string()],
926 ..Default::default()
927 },
928 None,
929 );
930 let mut fake_servers = language
931 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
932 disk_based_diagnostics_sources: vec!["disk".into()],
933 disk_based_diagnostics_progress_token: Some(progress_token.into()),
934 ..Default::default()
935 }))
936 .await;
937
938 let fs = FakeFs::new(cx.background());
939 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
940
941 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
942 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
943
944 let buffer = project
945 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
946 .await
947 .unwrap();
948
949 // Simulate diagnostics starting to update.
950 let fake_server = fake_servers.next().await.unwrap();
951 fake_server.start_progress(progress_token).await;
952
953 // Restart the server before the diagnostics finish updating.
954 project.update(cx, |project, cx| {
955 project.restart_language_servers_for_buffers([buffer], cx);
956 });
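    // Subscribe to events only after requesting the restart, so that only the new server's events are observed.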
957 let mut events = subscribe(&project, cx);
958
959 // Simulate the newly started server sending more diagnostics.
960 let fake_server = fake_servers.next().await.unwrap();
961 assert_eq!(
962 events.next().await.unwrap(),
963 Event::LanguageServerAdded(LanguageServerId(1))
964 );
965 fake_server.start_progress(progress_token).await;
966 assert_eq!(
967 events.next().await.unwrap(),
968 Event::DiskBasedDiagnosticsStarted {
969 language_server_id: LanguageServerId(1)
970 }
971 );
972 project.read_with(cx, |project, _| {
973 assert_eq!(
974 project
975 .language_servers_running_disk_based_diagnostics()
976 .collect::<Vec<_>>(),
977 [LanguageServerId(1)]
978 );
979 });
980
981 // All diagnostics are considered done, despite the old server's diagnostic
982 // task never completing.
983 fake_server.end_progress(progress_token);
984 assert_eq!(
985 events.next().await.unwrap(),
986 Event::DiskBasedDiagnosticsFinished {
987 language_server_id: LanguageServerId(1)
988 }
989 );
990 project.read_with(cx, |project, _| {
991 assert_eq!(
992 project
993 .language_servers_running_disk_based_diagnostics()
994 .collect::<Vec<_>>(),
995 [LanguageServerId(0); 0]
996 );
997 });
998}
999
1000#[gpui::test]
1001async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1002 init_test(cx);
1003
1004 let mut language = Language::new(
1005 LanguageConfig {
1006 path_suffixes: vec!["rs".to_string()],
1007 ..Default::default()
1008 },
1009 None,
1010 );
1011 let mut fake_servers = language
1012 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1013 ..Default::default()
1014 }))
1015 .await;
1016
1017 let fs = FakeFs::new(cx.background());
1018 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1019
1020 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1021 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1022
1023 let buffer = project
1024 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1025 .await
1026 .unwrap();
1027
1028 // Publish diagnostics
1029 let fake_server = fake_servers.next().await.unwrap();
1030 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1031 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1032 version: None,
1033 diagnostics: vec![lsp::Diagnostic {
1034 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1035 severity: Some(lsp::DiagnosticSeverity::ERROR),
1036 message: "the message".to_string(),
1037 ..Default::default()
1038 }],
1039 });
1040
1041 cx.foreground().run_until_parked();
1042 buffer.read_with(cx, |buffer, _| {
1043 assert_eq!(
1044 buffer
1045 .snapshot()
1046 .diagnostics_in_range::<_, usize>(0..1, false)
1047 .map(|entry| entry.diagnostic.message.clone())
1048 .collect::<Vec<_>>(),
1049 ["the message".to_string()]
1050 );
1051 });
1052 project.read_with(cx, |project, cx| {
1053 assert_eq!(
1054 project.diagnostic_summary(cx),
1055 DiagnosticSummary {
1056 error_count: 1,
1057 warning_count: 0,
1058 }
1059 );
1060 });
1061
1062 project.update(cx, |project, cx| {
1063 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1064 });
1065
1066 // The diagnostics are cleared.
1067 cx.foreground().run_until_parked();
1068 buffer.read_with(cx, |buffer, _| {
1069 assert_eq!(
1070 buffer
1071 .snapshot()
1072 .diagnostics_in_range::<_, usize>(0..1, false)
1073 .map(|entry| entry.diagnostic.message.clone())
1074 .collect::<Vec<_>>(),
1075 Vec::<String>::new(),
1076 );
1077 });
1078 project.read_with(cx, |project, cx| {
1079 assert_eq!(
1080 project.diagnostic_summary(cx),
1081 DiagnosticSummary {
1082 error_count: 0,
1083 warning_count: 0,
1084 }
1085 );
1086 });
1087}
1088
1089#[gpui::test]
1090async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1091 init_test(cx);
1092
1093 let mut language = Language::new(
1094 LanguageConfig {
1095 path_suffixes: vec!["rs".to_string()],
1096 ..Default::default()
1097 },
1098 None,
1099 );
1100 let mut fake_servers = language
1101 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1102 name: "the-lsp",
1103 ..Default::default()
1104 }))
1105 .await;
1106
1107 let fs = FakeFs::new(cx.background());
1108 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1109
1110 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1111 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1112
1113 let buffer = project
1114 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1115 .await
1116 .unwrap();
1117
1118 // Before restarting the server, report diagnostics with an unknown buffer version.
1119 let fake_server = fake_servers.next().await.unwrap();
1120 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1121 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1122 version: Some(10000),
1123 diagnostics: Vec::new(),
1124 });
1125 cx.foreground().run_until_parked();
1126
1127 project.update(cx, |project, cx| {
1128 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1129 });
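    // After the restart, the buffer is reopened with its version reset to 0.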
1130 let mut fake_server = fake_servers.next().await.unwrap();
1131 let notification = fake_server
1132 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1133 .await
1134 .text_document;
1135 assert_eq!(notification.version, 0);
1136}
1137
1138#[gpui::test]
1139async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1140 init_test(cx);
1141
1142 let mut rust = Language::new(
1143 LanguageConfig {
1144 name: Arc::from("Rust"),
1145 path_suffixes: vec!["rs".to_string()],
1146 ..Default::default()
1147 },
1148 None,
1149 );
1150 let mut fake_rust_servers = rust
1151 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1152 name: "rust-lsp",
1153 ..Default::default()
1154 }))
1155 .await;
1156 let mut js = Language::new(
1157 LanguageConfig {
1158 name: Arc::from("JavaScript"),
1159 path_suffixes: vec!["js".to_string()],
1160 ..Default::default()
1161 },
1162 None,
1163 );
1164 let mut fake_js_servers = js
1165 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1166 name: "js-lsp",
1167 ..Default::default()
1168 }))
1169 .await;
1170
1171 let fs = FakeFs::new(cx.background());
1172 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1173 .await;
1174
1175 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1176 project.update(cx, |project, _| {
1177 project.languages.add(Arc::new(rust));
1178 project.languages.add(Arc::new(js));
1179 });
1180
1181 let _rs_buffer = project
1182 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1183 .await
1184 .unwrap();
1185 let _js_buffer = project
1186 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1187 .await
1188 .unwrap();
1189
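    // Each language server is started and receives only the buffer matching its language.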
1190 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1191 assert_eq!(
1192 fake_rust_server_1
1193 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1194 .await
1195 .text_document
1196 .uri
1197 .as_str(),
1198 "file:///dir/a.rs"
1199 );
1200
1201 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1202 assert_eq!(
1203 fake_js_server
1204 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1205 .await
1206 .text_document
1207 .uri
1208 .as_str(),
1209 "file:///dir/b.js"
1210 );
1211
    // Disable the Rust language server, ensuring that only that server gets stopped.
1213 cx.update(|cx| {
1214 cx.update_global(|settings: &mut SettingsStore, cx| {
1215 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1216 settings.languages.insert(
1217 Arc::from("Rust"),
1218 LanguageSettingsContent {
1219 enable_language_server: Some(false),
1220 ..Default::default()
1221 },
1222 );
1223 });
1224 })
1225 });
1226 fake_rust_server_1
1227 .receive_notification::<lsp::notification::Exit>()
1228 .await;
1229
1230 // Enable Rust and disable JavaScript language servers, ensuring that the
1231 // former gets started again and that the latter stops.
1232 cx.update(|cx| {
1233 cx.update_global(|settings: &mut SettingsStore, cx| {
1234 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1235 settings.languages.insert(
1236 Arc::from("Rust"),
1237 LanguageSettingsContent {
1238 enable_language_server: Some(true),
1239 ..Default::default()
1240 },
1241 );
1242 settings.languages.insert(
1243 Arc::from("JavaScript"),
1244 LanguageSettingsContent {
1245 enable_language_server: Some(false),
1246 ..Default::default()
1247 },
1248 );
1249 });
1250 })
1251 });
1252 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1253 assert_eq!(
1254 fake_rust_server_2
1255 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1256 .await
1257 .text_document
1258 .uri
1259 .as_str(),
1260 "file:///dir/a.rs"
1261 );
1262 fake_js_server
1263 .receive_notification::<lsp::notification::Exit>()
1264 .await;
1265}
1266
1267#[gpui::test(iterations = 3)]
1268async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1269 init_test(cx);
1270
1271 let mut language = Language::new(
1272 LanguageConfig {
1273 name: "Rust".into(),
1274 path_suffixes: vec!["rs".to_string()],
1275 ..Default::default()
1276 },
1277 Some(tree_sitter_rust::language()),
1278 );
1279 let mut fake_servers = language
1280 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1281 disk_based_diagnostics_sources: vec!["disk".into()],
1282 ..Default::default()
1283 }))
1284 .await;
1285
1286 let text = "
1287 fn a() { A }
1288 fn b() { BB }
1289 fn c() { CCC }
1290 "
1291 .unindent();
1292
1293 let fs = FakeFs::new(cx.background());
1294 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1295
1296 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1297 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1298
1299 let buffer = project
1300 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1301 .await
1302 .unwrap();
1303
1304 let mut fake_server = fake_servers.next().await.unwrap();
1305 let open_notification = fake_server
1306 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1307 .await;
1308
1309 // Edit the buffer, moving the content down
1310 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1311 let change_notification_1 = fake_server
1312 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1313 .await;
1314 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1315
1316 // Report some diagnostics for the initial version of the buffer
1317 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1318 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1319 version: Some(open_notification.text_document.version),
1320 diagnostics: vec![
1321 lsp::Diagnostic {
1322 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1323 severity: Some(DiagnosticSeverity::ERROR),
1324 message: "undefined variable 'A'".to_string(),
1325 source: Some("disk".to_string()),
1326 ..Default::default()
1327 },
1328 lsp::Diagnostic {
1329 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1330 severity: Some(DiagnosticSeverity::ERROR),
1331 message: "undefined variable 'BB'".to_string(),
1332 source: Some("disk".to_string()),
1333 ..Default::default()
1334 },
1335 lsp::Diagnostic {
1336 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1337 severity: Some(DiagnosticSeverity::ERROR),
1338 source: Some("disk".to_string()),
1339 message: "undefined variable 'CCC'".to_string(),
1340 ..Default::default()
1341 },
1342 ],
1343 });
1344
1345 // The diagnostics have moved down since they were created.
1346 buffer.next_notification(cx).await;
1347 cx.foreground().run_until_parked();
1348 buffer.read_with(cx, |buffer, _| {
1349 assert_eq!(
1350 buffer
1351 .snapshot()
1352 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1353 .collect::<Vec<_>>(),
1354 &[
1355 DiagnosticEntry {
1356 range: Point::new(3, 9)..Point::new(3, 11),
1357 diagnostic: Diagnostic {
1358 source: Some("disk".into()),
1359 severity: DiagnosticSeverity::ERROR,
1360 message: "undefined variable 'BB'".to_string(),
1361 is_disk_based: true,
1362 group_id: 1,
1363 is_primary: true,
1364 ..Default::default()
1365 },
1366 },
1367 DiagnosticEntry {
1368 range: Point::new(4, 9)..Point::new(4, 12),
1369 diagnostic: Diagnostic {
1370 source: Some("disk".into()),
1371 severity: DiagnosticSeverity::ERROR,
1372 message: "undefined variable 'CCC'".to_string(),
1373 is_disk_based: true,
1374 group_id: 2,
1375 is_primary: true,
1376 ..Default::default()
1377 }
1378 }
1379 ]
1380 );
1381 assert_eq!(
1382 chunks_with_diagnostics(buffer, 0..buffer.len()),
1383 [
1384 ("\n\nfn a() { ".to_string(), None),
1385 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1386 (" }\nfn b() { ".to_string(), None),
1387 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1388 (" }\nfn c() { ".to_string(), None),
1389 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1390 (" }\n".to_string(), None),
1391 ]
1392 );
1393 assert_eq!(
1394 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1395 [
1396 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1397 (" }\nfn c() { ".to_string(), None),
1398 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1399 ]
1400 );
1401 });
1402
1403 // Ensure overlapping diagnostics are highlighted correctly.
1404 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1405 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1406 version: Some(open_notification.text_document.version),
1407 diagnostics: vec![
1408 lsp::Diagnostic {
1409 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1410 severity: Some(DiagnosticSeverity::ERROR),
1411 message: "undefined variable 'A'".to_string(),
1412 source: Some("disk".to_string()),
1413 ..Default::default()
1414 },
1415 lsp::Diagnostic {
1416 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1417 severity: Some(DiagnosticSeverity::WARNING),
1418 message: "unreachable statement".to_string(),
1419 source: Some("disk".to_string()),
1420 ..Default::default()
1421 },
1422 ],
1423 });
1424
1425 buffer.next_notification(cx).await;
1426 cx.foreground().run_until_parked();
1427 buffer.read_with(cx, |buffer, _| {
1428 assert_eq!(
1429 buffer
1430 .snapshot()
1431 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1432 .collect::<Vec<_>>(),
1433 &[
1434 DiagnosticEntry {
1435 range: Point::new(2, 9)..Point::new(2, 12),
1436 diagnostic: Diagnostic {
1437 source: Some("disk".into()),
1438 severity: DiagnosticSeverity::WARNING,
1439 message: "unreachable statement".to_string(),
1440 is_disk_based: true,
1441 group_id: 4,
1442 is_primary: true,
1443 ..Default::default()
1444 }
1445 },
1446 DiagnosticEntry {
1447 range: Point::new(2, 9)..Point::new(2, 10),
1448 diagnostic: Diagnostic {
1449 source: Some("disk".into()),
1450 severity: DiagnosticSeverity::ERROR,
1451 message: "undefined variable 'A'".to_string(),
1452 is_disk_based: true,
1453 group_id: 3,
1454 is_primary: true,
1455 ..Default::default()
1456 },
1457 }
1458 ]
1459 );
1460 assert_eq!(
1461 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1462 [
1463 ("fn a() { ".to_string(), None),
1464 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1465 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1466 ("\n".to_string(), None),
1467 ]
1468 );
1469 assert_eq!(
1470 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1471 [
1472 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1473 ("\n".to_string(), None),
1474 ]
1475 );
1476 });
1477
1478 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1479 // changes since the last save.
1480 buffer.update(cx, |buffer, cx| {
1481 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1482 buffer.edit(
1483 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1484 None,
1485 cx,
1486 );
1487 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1488 });
1489 let change_notification_2 = fake_server
1490 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1491 .await;
1492 assert!(
1493 change_notification_2.text_document.version > change_notification_1.text_document.version
1494 );
1495
1496 // Handle out-of-order diagnostics
1497 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1498 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1499 version: Some(change_notification_2.text_document.version),
1500 diagnostics: vec![
1501 lsp::Diagnostic {
1502 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1503 severity: Some(DiagnosticSeverity::ERROR),
1504 message: "undefined variable 'BB'".to_string(),
1505 source: Some("disk".to_string()),
1506 ..Default::default()
1507 },
1508 lsp::Diagnostic {
1509 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1510 severity: Some(DiagnosticSeverity::WARNING),
1511 message: "undefined variable 'A'".to_string(),
1512 source: Some("disk".to_string()),
1513 ..Default::default()
1514 },
1515 ],
1516 });
1517
1518 buffer.next_notification(cx).await;
1519 cx.foreground().run_until_parked();
1520 buffer.read_with(cx, |buffer, _| {
1521 assert_eq!(
1522 buffer
1523 .snapshot()
1524 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1525 .collect::<Vec<_>>(),
1526 &[
1527 DiagnosticEntry {
1528 range: Point::new(2, 21)..Point::new(2, 22),
1529 diagnostic: Diagnostic {
1530 source: Some("disk".into()),
1531 severity: DiagnosticSeverity::WARNING,
1532 message: "undefined variable 'A'".to_string(),
1533 is_disk_based: true,
1534 group_id: 6,
1535 is_primary: true,
1536 ..Default::default()
1537 }
1538 },
1539 DiagnosticEntry {
1540 range: Point::new(3, 9)..Point::new(3, 14),
1541 diagnostic: Diagnostic {
1542 source: Some("disk".into()),
1543 severity: DiagnosticSeverity::ERROR,
1544 message: "undefined variable 'BB'".to_string(),
1545 is_disk_based: true,
1546 group_id: 5,
1547 is_primary: true,
1548 ..Default::default()
1549 },
1550 }
1551 ]
1552 );
1553 });
1554}
1555
1556#[gpui::test]
1557async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1558 init_test(cx);
1559
1560 let text = concat!(
1561 "let one = ;\n", //
1562 "let two = \n",
1563 "let three = 3;\n",
1564 );
1565
1566 let fs = FakeFs::new(cx.background());
1567 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1568
1569 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1570 let buffer = project
1571 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1572 .await
1573 .unwrap();
1574
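    // Report two diagnostics with empty ranges: one just before the final character
    // of a line, and one at the very end of a line.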
1575 project.update(cx, |project, cx| {
1576 project
1577 .update_buffer_diagnostics(
1578 &buffer,
1579 LanguageServerId(0),
1580 None,
1581 vec![
1582 DiagnosticEntry {
1583 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1584 diagnostic: Diagnostic {
1585 severity: DiagnosticSeverity::ERROR,
1586 message: "syntax error 1".to_string(),
1587 ..Default::default()
1588 },
1589 },
1590 DiagnosticEntry {
1591 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1592 diagnostic: Diagnostic {
1593 severity: DiagnosticSeverity::ERROR,
1594 message: "syntax error 2".to_string(),
1595 ..Default::default()
1596 },
1597 },
1598 ],
1599 cx,
1600 )
1601 .unwrap();
1602 });
1603
1604 // An empty range is extended forward to include the following character.
1605 // At the end of a line, an empty range is extended backward to include
1606 // the preceding character.
1607 buffer.read_with(cx, |buffer, _| {
1608 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1609 assert_eq!(
1610 chunks
1611 .iter()
1612 .map(|(s, d)| (s.as_str(), *d))
1613 .collect::<Vec<_>>(),
1614 &[
1615 ("let one = ", None),
1616 (";", Some(DiagnosticSeverity::ERROR)),
1617 ("\nlet two =", None),
1618 (" ", Some(DiagnosticSeverity::ERROR)),
1619 ("\nlet three = 3;\n", None)
1620 ]
1621 );
1622 });
1623}
1624
1625#[gpui::test]
1626async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1627 init_test(cx);
1628
1629 let fs = FakeFs::new(cx.background());
1630 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1631 .await;
1632
1633 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1634
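    // Report an error for the same range from two different language servers; both count toward the summary.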
1635 project.update(cx, |project, cx| {
1636 project
1637 .update_diagnostic_entries(
1638 LanguageServerId(0),
1639 Path::new("/dir/a.rs").to_owned(),
1640 None,
1641 vec![DiagnosticEntry {
1642 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1643 diagnostic: Diagnostic {
1644 severity: DiagnosticSeverity::ERROR,
1645 is_primary: true,
1646 message: "syntax error a1".to_string(),
1647 ..Default::default()
1648 },
1649 }],
1650 cx,
1651 )
1652 .unwrap();
1653 project
1654 .update_diagnostic_entries(
1655 LanguageServerId(1),
1656 Path::new("/dir/a.rs").to_owned(),
1657 None,
1658 vec![DiagnosticEntry {
1659 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1660 diagnostic: Diagnostic {
1661 severity: DiagnosticSeverity::ERROR,
1662 is_primary: true,
1663 message: "syntax error b1".to_string(),
1664 ..Default::default()
1665 },
1666 }],
1667 cx,
1668 )
1669 .unwrap();
1670
1671 assert_eq!(
1672 project.diagnostic_summary(cx),
1673 DiagnosticSummary {
1674 error_count: 2,
1675 warning_count: 0,
1676 }
1677 );
1678 });
1679}
1680
1681#[gpui::test]
1682async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1683 init_test(cx);
1684
1685 let mut language = Language::new(
1686 LanguageConfig {
1687 name: "Rust".into(),
1688 path_suffixes: vec!["rs".to_string()],
1689 ..Default::default()
1690 },
1691 Some(tree_sitter_rust::language()),
1692 );
1693 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1694
1695 let text = "
1696 fn a() {
1697 f1();
1698 }
1699 fn b() {
1700 f2();
1701 }
1702 fn c() {
1703 f3();
1704 }
1705 "
1706 .unindent();
1707
1708 let fs = FakeFs::new(cx.background());
1709 fs.insert_tree(
1710 "/dir",
1711 json!({
1712 "a.rs": text.clone(),
1713 }),
1714 )
1715 .await;
1716
1717 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1718 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1719 let buffer = project
1720 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1721 .await
1722 .unwrap();
1723
1724 let mut fake_server = fake_servers.next().await.unwrap();
1725 let lsp_document_version = fake_server
1726 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1727 .await
1728 .text_document
1729 .version;
1730
1731 // Simulate editing the buffer after the language server computes some edits.
1732 buffer.update(cx, |buffer, cx| {
1733 buffer.edit(
1734 [(
1735 Point::new(0, 0)..Point::new(0, 0),
1736 "// above first function\n",
1737 )],
1738 None,
1739 cx,
1740 );
1741 buffer.edit(
1742 [(
1743 Point::new(2, 0)..Point::new(2, 0),
1744 " // inside first function\n",
1745 )],
1746 None,
1747 cx,
1748 );
1749 buffer.edit(
1750 [(
1751 Point::new(6, 4)..Point::new(6, 4),
1752 "// inside second function ",
1753 )],
1754 None,
1755 cx,
1756 );
1757
1758 assert_eq!(
1759 buffer.text(),
1760 "
1761 // above first function
1762 fn a() {
1763 // inside first function
1764 f1();
1765 }
1766 fn b() {
1767 // inside second function f2();
1768 }
1769 fn c() {
1770 f3();
1771 }
1772 "
1773 .unindent()
1774 );
1775 });
1776
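    // Interpret the server's edits, which were computed against the buffer's initial version,
    // in terms of the current buffer contents.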
1777 let edits = project
1778 .update(cx, |project, cx| {
1779 project.edits_from_lsp(
1780 &buffer,
1781 vec![
1782 // replace body of first function
1783 lsp::TextEdit {
1784 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1785 new_text: "
1786 fn a() {
1787 f10();
1788 }
1789 "
1790 .unindent(),
1791 },
1792 // edit inside second function
1793 lsp::TextEdit {
1794 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1795 new_text: "00".into(),
1796 },
1797 // edit inside third function via two distinct edits
1798 lsp::TextEdit {
1799 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1800 new_text: "4000".into(),
1801 },
1802 lsp::TextEdit {
1803 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1804 new_text: "".into(),
1805 },
1806 ],
1807 LanguageServerId(0),
1808 Some(lsp_document_version),
1809 cx,
1810 )
1811 })
1812 .await
1813 .unwrap();
1814
1815 buffer.update(cx, |buffer, cx| {
1816 for (range, new_text) in edits {
1817 buffer.edit([(range, new_text)], None, cx);
1818 }
1819 assert_eq!(
1820 buffer.text(),
1821 "
1822 // above first function
1823 fn a() {
1824 // inside first function
1825 f10();
1826 }
1827 fn b() {
1828 // inside second function f200();
1829 }
1830 fn c() {
1831 f4000();
1832 }
1833 "
1834 .unindent()
1835 );
1836 });
1837}
1838
1839#[gpui::test]
1840async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1841 init_test(cx);
1842
1843 let text = "
1844 use a::b;
1845 use a::c;
1846
1847 fn f() {
1848 b();
1849 c();
1850 }
1851 "
1852 .unindent();
1853
1854 let fs = FakeFs::new(cx.background());
1855 fs.insert_tree(
1856 "/dir",
1857 json!({
1858 "a.rs": text.clone(),
1859 }),
1860 )
1861 .await;
1862
1863 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1864 let buffer = project
1865 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1866 .await
1867 .unwrap();
1868
1869 // Simulate the language server sending us a small edit in the form of a very large diff.
1870 // Rust-analyzer does this when performing a merge-imports code action.
1871 let edits = project
1872 .update(cx, |project, cx| {
1873 project.edits_from_lsp(
1874 &buffer,
1875 [
1876 // Replace the first use statement without editing the semicolon.
1877 lsp::TextEdit {
1878 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1879 new_text: "a::{b, c}".into(),
1880 },
1881 // Reinsert the remainder of the file between the semicolon and the final
1882 // newline of the file.
1883 lsp::TextEdit {
1884 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1885 new_text: "\n\n".into(),
1886 },
1887 lsp::TextEdit {
1888 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1889 new_text: "
1890 fn f() {
1891 b();
1892 c();
1893 }"
1894 .unindent(),
1895 },
1896 // Delete everything after the first newline of the file.
1897 lsp::TextEdit {
1898 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1899 new_text: "".into(),
1900 },
1901 ],
1902 LanguageServerId(0),
1903 None,
1904 cx,
1905 )
1906 })
1907 .await
1908 .unwrap();
1909
1910 buffer.update(cx, |buffer, cx| {
1911 let edits = edits
1912 .into_iter()
1913 .map(|(range, text)| {
1914 (
1915 range.start.to_point(buffer)..range.end.to_point(buffer),
1916 text,
1917 )
1918 })
1919 .collect::<Vec<_>>();
1920
1921 assert_eq!(
1922 edits,
1923 [
1924 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1925 (Point::new(1, 0)..Point::new(2, 0), "".into())
1926 ]
1927 );
1928
1929 for (range, new_text) in edits {
1930 buffer.edit([(range, new_text)], None, cx);
1931 }
1932 assert_eq!(
1933 buffer.text(),
1934 "
1935 use a::{b, c};
1936
1937 fn f() {
1938 b();
1939 c();
1940 }
1941 "
1942 .unindent()
1943 );
1944 });
1945}
1946
1947#[gpui::test]
1948async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1949 init_test(cx);
1950
1951 let text = "
1952 use a::b;
1953 use a::c;
1954
1955 fn f() {
1956 b();
1957 c();
1958 }
1959 "
1960 .unindent();
1961
1962 let fs = FakeFs::new(cx.background());
1963 fs.insert_tree(
1964 "/dir",
1965 json!({
1966 "a.rs": text.clone(),
1967 }),
1968 )
1969 .await;
1970
1971 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1972 let buffer = project
1973 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1974 .await
1975 .unwrap();
1976
1977 // Simulate the language server sending us edits in a non-ordered fashion,
1978 // with ranges sometimes being inverted or pointing to invalid locations.
1979 let edits = project
1980 .update(cx, |project, cx| {
1981 project.edits_from_lsp(
1982 &buffer,
1983 [
1984 lsp::TextEdit {
1985 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1986 new_text: "\n\n".into(),
1987 },
1988 lsp::TextEdit {
1989 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1990 new_text: "a::{b, c}".into(),
1991 },
1992 lsp::TextEdit {
1993 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1994 new_text: "".into(),
1995 },
1996 lsp::TextEdit {
1997 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1998 new_text: "
1999 fn f() {
2000 b();
2001 c();
2002 }"
2003 .unindent(),
2004 },
2005 ],
2006 LanguageServerId(0),
2007 None,
2008 cx,
2009 )
2010 })
2011 .await
2012 .unwrap();
2013
2014 buffer.update(cx, |buffer, cx| {
2015 let edits = edits
2016 .into_iter()
2017 .map(|(range, text)| {
2018 (
2019 range.start.to_point(buffer)..range.end.to_point(buffer),
2020 text,
2021 )
2022 })
2023 .collect::<Vec<_>>();
2024
2025 assert_eq!(
2026 edits,
2027 [
2028 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2029 (Point::new(1, 0)..Point::new(2, 0), "".into())
2030 ]
2031 );
2032
2033 for (range, new_text) in edits {
2034 buffer.edit([(range, new_text)], None, cx);
2035 }
2036 assert_eq!(
2037 buffer.text(),
2038 "
2039 use a::{b, c};
2040
2041 fn f() {
2042 b();
2043 c();
2044 }
2045 "
2046 .unindent()
2047 );
2048 });
2049}
2050
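// Collects the chunks of `buffer` within `range`, merging adjacent chunks that
// share the same diagnostic severity.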
2051fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2052 buffer: &Buffer,
2053 range: Range<T>,
2054) -> Vec<(String, Option<DiagnosticSeverity>)> {
2055 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2056 for chunk in buffer.snapshot().chunks(range, true) {
2057 if chunks.last().map_or(false, |prev_chunk| {
2058 prev_chunk.1 == chunk.diagnostic_severity
2059 }) {
2060 chunks.last_mut().unwrap().0.push_str(chunk.text);
2061 } else {
2062 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2063 }
2064 }
2065 chunks
2066}
2067
2068#[gpui::test(iterations = 10)]
2069async fn test_definition(cx: &mut gpui::TestAppContext) {
2070 init_test(cx);
2071
2072 let mut language = Language::new(
2073 LanguageConfig {
2074 name: "Rust".into(),
2075 path_suffixes: vec!["rs".to_string()],
2076 ..Default::default()
2077 },
2078 Some(tree_sitter_rust::language()),
2079 );
2080 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2081
2082 let fs = FakeFs::new(cx.background());
2083 fs.insert_tree(
2084 "/dir",
2085 json!({
2086 "a.rs": "const fn a() { A }",
2087 "b.rs": "const y: i32 = crate::a()",
2088 }),
2089 )
2090 .await;
2091
2092 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2093 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2094
2095 let buffer = project
2096 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2097 .await
2098 .unwrap();
2099
2100 let fake_server = fake_servers.next().await.unwrap();
2101 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2102 let params = params.text_document_position_params;
2103 assert_eq!(
2104 params.text_document.uri.to_file_path().unwrap(),
2105 Path::new("/dir/b.rs"),
2106 );
2107 assert_eq!(params.position, lsp::Position::new(0, 22));
2108
2109 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2110 lsp::Location::new(
2111 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2112 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2113 ),
2114 )))
2115 });
2116
2117 let mut definitions = project
2118 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2119 .await
2120 .unwrap();
2121
    // Assert that no new language server was started.
2123 cx.foreground().run_until_parked();
2124 assert!(fake_servers.try_next().is_err());
2125
2126 assert_eq!(definitions.len(), 1);
2127 let definition = definitions.pop().unwrap();
2128 cx.update(|cx| {
2129 let target_buffer = definition.target.buffer.read(cx);
2130 assert_eq!(
2131 target_buffer
2132 .file()
2133 .unwrap()
2134 .as_local()
2135 .unwrap()
2136 .abs_path(cx),
2137 Path::new("/dir/a.rs"),
2138 );
2139 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
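        // The definition's target is in a separate file, which was added to the
        // project as an invisible worktree.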
2140 assert_eq!(
2141 list_worktrees(&project, cx),
2142 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2143 );
2144
2145 drop(definition);
2146 });
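    // Dropping the definition releases the worktree that was created for its target file.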
2147 cx.read(|cx| {
2148 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2149 });
2150
2151 fn list_worktrees<'a>(
2152 project: &'a ModelHandle<Project>,
2153 cx: &'a AppContext,
2154 ) -> Vec<(&'a Path, bool)> {
2155 project
2156 .read(cx)
2157 .worktrees(cx)
2158 .map(|worktree| {
2159 let worktree = worktree.read(cx);
2160 (
2161 worktree.as_local().unwrap().abs_path().as_ref(),
2162 worktree.is_visible(),
2163 )
2164 })
2165 .collect::<Vec<_>>()
2166 }
2167}
2168
2169#[gpui::test]
2170async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2171 init_test(cx);
2172
2173 let mut language = Language::new(
2174 LanguageConfig {
2175 name: "TypeScript".into(),
2176 path_suffixes: vec!["ts".to_string()],
2177 ..Default::default()
2178 },
2179 Some(tree_sitter_typescript::language_typescript()),
2180 );
2181 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2182
2183 let fs = FakeFs::new(cx.background());
2184 fs.insert_tree(
2185 "/dir",
2186 json!({
2187 "a.ts": "",
2188 }),
2189 )
2190 .await;
2191
2192 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2193 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2194 let buffer = project
2195 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2196 .await
2197 .unwrap();
2198
2199 let fake_server = fake_language_servers.next().await.unwrap();
2200
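    // Request completions at the end of an identifier. The server's completion item
    // has no edit range, so the partial word before the cursor is replaced.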
2201 let text = "let a = b.fqn";
2202 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2203 let completions = project.update(cx, |project, cx| {
2204 project.completions(&buffer, text.len(), cx)
2205 });
2206
2207 fake_server
2208 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2209 Ok(Some(lsp::CompletionResponse::Array(vec![
2210 lsp::CompletionItem {
2211 label: "fullyQualifiedName?".into(),
2212 insert_text: Some("fullyQualifiedName".into()),
2213 ..Default::default()
2214 },
2215 ])))
2216 })
2217 .next()
2218 .await;
2219 let completions = completions.await.unwrap();
2220 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2221 assert_eq!(completions.len(), 1);
2222 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2223 assert_eq!(
2224 completions[0].old_range.to_offset(&snapshot),
2225 text.len() - 3..text.len()
2226 );
2227
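    // Request completions inside a string literal. Again the item has no edit range,
    // so only the "cmp" segment before the cursor is replaced.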
2228 let text = "let a = \"atoms/cmp\"";
2229 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2230 let completions = project.update(cx, |project, cx| {
2231 project.completions(&buffer, text.len() - 1, cx)
2232 });
2233
2234 fake_server
2235 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2236 Ok(Some(lsp::CompletionResponse::Array(vec![
2237 lsp::CompletionItem {
2238 label: "component".into(),
2239 ..Default::default()
2240 },
2241 ])))
2242 })
2243 .next()
2244 .await;
2245 let completions = completions.await.unwrap();
2246 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2247 assert_eq!(completions.len(), 1);
2248 assert_eq!(completions[0].new_text, "component");
2249 assert_eq!(
2250 completions[0].old_range.to_offset(&snapshot),
2251 text.len() - 4..text.len() - 1
2252 );
2253}
2254
2255#[gpui::test]
2256async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2257 init_test(cx);
2258
2259 let mut language = Language::new(
2260 LanguageConfig {
2261 name: "TypeScript".into(),
2262 path_suffixes: vec!["ts".to_string()],
2263 ..Default::default()
2264 },
2265 Some(tree_sitter_typescript::language_typescript()),
2266 );
2267 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2268
2269 let fs = FakeFs::new(cx.background());
2270 fs.insert_tree(
2271 "/dir",
2272 json!({
2273 "a.ts": "",
2274 }),
2275 )
2276 .await;
2277
2278 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2279 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2280 let buffer = project
2281 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2282 .await
2283 .unwrap();
2284
2285 let fake_server = fake_language_servers.next().await.unwrap();
2286
2287 let text = "let a = b.fqn";
2288 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2289 let completions = project.update(cx, |project, cx| {
2290 project.completions(&buffer, text.len(), cx)
2291 });
2292
2293 fake_server
2294 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2295 Ok(Some(lsp::CompletionResponse::Array(vec![
2296 lsp::CompletionItem {
2297 label: "fullyQualifiedName?".into(),
2298 insert_text: Some("fully\rQualified\r\nName".into()),
2299 ..Default::default()
2300 },
2301 ])))
2302 })
2303 .next()
2304 .await;
2305 let completions = completions.await.unwrap();
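    // Carriage returns in the completion's text are normalized to newlines.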
2306 assert_eq!(completions.len(), 1);
2307 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2308}
2309
2310#[gpui::test(iterations = 10)]
2311async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2312 init_test(cx);
2313
2314 let mut language = Language::new(
2315 LanguageConfig {
2316 name: "TypeScript".into(),
2317 path_suffixes: vec!["ts".to_string()],
2318 ..Default::default()
2319 },
2320 None,
2321 );
2322 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2323
2324 let fs = FakeFs::new(cx.background());
2325 fs.insert_tree(
2326 "/dir",
2327 json!({
2328 "a.ts": "a",
2329 }),
2330 )
2331 .await;
2332
2333 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2334 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2335 let buffer = project
2336 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2337 .await
2338 .unwrap();
2339
2340 let fake_server = fake_language_servers.next().await.unwrap();
2341
    // The language server returns code actions that contain commands rather than edits.
2343 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2344 fake_server
2345 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2346 Ok(Some(vec![
2347 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2348 title: "The code action".into(),
2349 command: Some(lsp::Command {
2350 title: "The command".into(),
2351 command: "_the/command".into(),
2352 arguments: Some(vec![json!("the-argument")]),
2353 }),
2354 ..Default::default()
2355 }),
2356 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2357 title: "two".into(),
2358 ..Default::default()
2359 }),
2360 ]))
2361 })
2362 .next()
2363 .await;
2364
2365 let action = actions.await.unwrap()[0].clone();
2366 let apply = project.update(cx, |project, cx| {
2367 project.apply_code_action(buffer.clone(), action, true, cx)
2368 });
2369
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2372 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2373 |action, _| async move { Ok(action) },
2374 );
2375
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2378 fake_server
2379 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2380 let fake = fake_server.clone();
2381 move |params, _| {
2382 assert_eq!(params.command, "_the/command");
2383 let fake = fake.clone();
2384 async move {
2385 fake.server
2386 .request::<lsp::request::ApplyWorkspaceEdit>(
2387 lsp::ApplyWorkspaceEditParams {
2388 label: None,
2389 edit: lsp::WorkspaceEdit {
2390 changes: Some(
2391 [(
2392 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2393 vec![lsp::TextEdit {
2394 range: lsp::Range::new(
2395 lsp::Position::new(0, 0),
2396 lsp::Position::new(0, 0),
2397 ),
2398 new_text: "X".into(),
2399 }],
2400 )]
2401 .into_iter()
2402 .collect(),
2403 ),
2404 ..Default::default()
2405 },
2406 },
2407 )
2408 .await
2409 .unwrap();
2410 Ok(Some(json!(null)))
2411 }
2412 }
2413 })
2414 .next()
2415 .await;
2416
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2419 let transaction = apply.await.unwrap();
2420 assert!(transaction.0.contains_key(&buffer));
2421 buffer.update(cx, |buffer, cx| {
2422 assert_eq!(buffer.text(), "Xa");
2423 buffer.undo(cx);
2424 assert_eq!(buffer.text(), "a");
2425 });
2426}
2427
2428#[gpui::test(iterations = 10)]
2429async fn test_save_file(cx: &mut gpui::TestAppContext) {
2430 init_test(cx);
2431
2432 let fs = FakeFs::new(cx.background());
2433 fs.insert_tree(
2434 "/dir",
2435 json!({
2436 "file1": "the old contents",
2437 }),
2438 )
2439 .await;
2440
2441 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2442 let buffer = project
2443 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2444 .await
2445 .unwrap();
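    // Edit the buffer so that its contents no longer match the file on disk.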
2446 buffer.update(cx, |buffer, cx| {
2447 assert_eq!(buffer.text(), "the old contents");
2448 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2449 });
2450
2451 project
2452 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2453 .await
2454 .unwrap();
2455
2456 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2457 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2458}
2459
2460#[gpui::test]
2461async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2462 init_test(cx);
2463
2464 let fs = FakeFs::new(cx.background());
2465 fs.insert_tree(
2466 "/dir",
2467 json!({
2468 "file1": "the old contents",
2469 }),
2470 )
2471 .await;
2472
2473 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2474 let buffer = project
2475 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2476 .await
2477 .unwrap();
2478 buffer.update(cx, |buffer, cx| {
2479 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2480 });
2481
2482 project
2483 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2484 .await
2485 .unwrap();
2486
2487 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2488 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2489}
2490
2491#[gpui::test]
2492async fn test_save_as(cx: &mut gpui::TestAppContext) {
2493 init_test(cx);
2494
2495 let fs = FakeFs::new(cx.background());
2496 fs.insert_tree("/dir", json!({})).await;
2497
2498 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2499
2500 let languages = project.read_with(cx, |project, _| project.languages().clone());
2501 languages.register(
2502 "/some/path",
2503 LanguageConfig {
2504 name: "Rust".into(),
2505 path_suffixes: vec!["rs".into()],
2506 ..Default::default()
2507 },
2508 tree_sitter_rust::language(),
2509 vec![],
2510 |_| Default::default(),
2511 );
2512
2513 let buffer = project.update(cx, |project, cx| {
2514 project.create_buffer("", None, cx).unwrap()
2515 });
2516 buffer.update(cx, |buffer, cx| {
2517 buffer.edit([(0..0, "abc")], None, cx);
2518 assert!(buffer.is_dirty());
2519 assert!(!buffer.has_conflict());
2520 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2521 });
2522 project
2523 .update(cx, |project, cx| {
2524 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2525 })
2526 .await
2527 .unwrap();
2528 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2529
2530 cx.foreground().run_until_parked();
2531 buffer.read_with(cx, |buffer, cx| {
2532 assert_eq!(
2533 buffer.file().unwrap().full_path(cx),
2534 Path::new("dir/file1.rs")
2535 );
2536 assert!(!buffer.is_dirty());
2537 assert!(!buffer.has_conflict());
2538 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2539 });
2540
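    // Opening the buffer's new path returns the same buffer that was just saved.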
2541 let opened_buffer = project
2542 .update(cx, |project, cx| {
2543 project.open_local_buffer("/dir/file1.rs", cx)
2544 })
2545 .await
2546 .unwrap();
2547 assert_eq!(opened_buffer, buffer);
2548}
2549
2550#[gpui::test(retries = 5)]
2551async fn test_rescan_and_remote_updates(
2552 deterministic: Arc<Deterministic>,
2553 cx: &mut gpui::TestAppContext,
2554) {
2555 init_test(cx);
2556 cx.foreground().allow_parking();
2557
2558 let dir = temp_tree(json!({
2559 "a": {
2560 "file1": "",
2561 "file2": "",
2562 "file3": "",
2563 },
2564 "b": {
2565 "c": {
2566 "file4": "",
2567 "file5": "",
2568 }
2569 }
2570 }));
2571
2572 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2573 let rpc = project.read_with(cx, |p, _| p.client.clone());
2574
2575 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2576 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2577 async move { buffer.await.unwrap() }
2578 };
2579 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2580 project.read_with(cx, |project, cx| {
2581 let tree = project.worktrees(cx).next().unwrap();
2582 tree.read(cx)
2583 .entry_for_path(path)
2584 .unwrap_or_else(|| panic!("no entry for path {}", path))
2585 .id
2586 })
2587 };
2588
2589 let buffer2 = buffer_for_path("a/file2", cx).await;
2590 let buffer3 = buffer_for_path("a/file3", cx).await;
2591 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2592 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2593
2594 let file2_id = id_for_path("a/file2", cx);
2595 let file3_id = id_for_path("a/file3", cx);
2596 let file4_id = id_for_path("b/c/file4", cx);
2597
2598 // Create a remote copy of this worktree.
2599 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2600
2601 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2602
2603 let updates = Arc::new(Mutex::new(Vec::new()));
2604 tree.update(cx, |tree, cx| {
2605 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2606 let updates = updates.clone();
2607 move |update| {
2608 updates.lock().push(update);
2609 async { true }
2610 }
2611 });
2612 });
2613
2614 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2615 deterministic.run_until_parked();
2616
2617 cx.read(|cx| {
2618 assert!(!buffer2.read(cx).is_dirty());
2619 assert!(!buffer3.read(cx).is_dirty());
2620 assert!(!buffer4.read(cx).is_dirty());
2621 assert!(!buffer5.read(cx).is_dirty());
2622 });
2623
2624 // Rename and delete files and directories.
2625 tree.flush_fs_events(cx).await;
2626 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2627 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2628 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2629 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2630 tree.flush_fs_events(cx).await;
2631
2632 let expected_paths = vec![
2633 "a",
2634 "a/file1",
2635 "a/file2.new",
2636 "b",
2637 "d",
2638 "d/file3",
2639 "d/file4",
2640 ];
2641
2642 cx.read(|app| {
2643 assert_eq!(
2644 tree.read(app)
2645 .paths()
2646 .map(|p| p.to_str().unwrap())
2647 .collect::<Vec<_>>(),
2648 expected_paths
2649 );
2650
2651 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2652 assert_eq!(id_for_path("d/file3", cx), file3_id);
2653 assert_eq!(id_for_path("d/file4", cx), file4_id);
2654
2655 assert_eq!(
2656 buffer2.read(app).file().unwrap().path().as_ref(),
2657 Path::new("a/file2.new")
2658 );
2659 assert_eq!(
2660 buffer3.read(app).file().unwrap().path().as_ref(),
2661 Path::new("d/file3")
2662 );
2663 assert_eq!(
2664 buffer4.read(app).file().unwrap().path().as_ref(),
2665 Path::new("d/file4")
2666 );
2667 assert_eq!(
2668 buffer5.read(app).file().unwrap().path().as_ref(),
2669 Path::new("b/c/file5")
2670 );
2671
2672 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2673 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2674 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2675 assert!(buffer5.read(app).file().unwrap().is_deleted());
2676 });
2677
2678 // Update the remote worktree. Check that it becomes consistent with the
2679 // local worktree.
2680 deterministic.run_until_parked();
2681 remote.update(cx, |remote, _| {
2682 for update in updates.lock().drain(..) {
2683 remote.as_remote_mut().unwrap().update_from_remote(update);
2684 }
2685 });
2686 deterministic.run_until_parked();
2687 remote.read_with(cx, |remote, _| {
2688 assert_eq!(
2689 remote
2690 .paths()
2691 .map(|p| p.to_str().unwrap())
2692 .collect::<Vec<_>>(),
2693 expected_paths
2694 );
2695 });
2696}
2697
2698#[gpui::test(iterations = 10)]
2699async fn test_buffer_identity_across_renames(
2700 deterministic: Arc<Deterministic>,
2701 cx: &mut gpui::TestAppContext,
2702) {
2703 init_test(cx);
2704
2705 let fs = FakeFs::new(cx.background());
2706 fs.insert_tree(
2707 "/dir",
2708 json!({
2709 "a": {
2710 "file1": "",
2711 }
2712 }),
2713 )
2714 .await;
2715
2716 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2717 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2718 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2719
2720 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2721 project.read_with(cx, |project, cx| {
2722 let tree = project.worktrees(cx).next().unwrap();
2723 tree.read(cx)
2724 .entry_for_path(path)
2725 .unwrap_or_else(|| panic!("no entry for path {}", path))
2726 .id
2727 })
2728 };
2729
2730 let dir_id = id_for_path("a", cx);
2731 let file_id = id_for_path("a/file1", cx);
2732 let buffer = project
2733 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2734 .await
2735 .unwrap();
2736 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2737
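    // Rename the directory containing the buffer's file. The entry ids and the
    // buffer's identity are preserved across the rename.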
2738 project
2739 .update(cx, |project, cx| {
2740 project.rename_entry(dir_id, Path::new("b"), cx)
2741 })
2742 .unwrap()
2743 .await
2744 .unwrap();
2745 deterministic.run_until_parked();
2746 assert_eq!(id_for_path("b", cx), dir_id);
2747 assert_eq!(id_for_path("b/file1", cx), file_id);
2748 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2749}
2750
2751#[gpui::test]
2752async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2753 init_test(cx);
2754
2755 let fs = FakeFs::new(cx.background());
2756 fs.insert_tree(
2757 "/dir",
2758 json!({
2759 "a.txt": "a-contents",
2760 "b.txt": "b-contents",
2761 }),
2762 )
2763 .await;
2764
2765 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2766
2767 // Spawn multiple tasks to open paths, repeating some paths.
2768 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2769 (
2770 p.open_local_buffer("/dir/a.txt", cx),
2771 p.open_local_buffer("/dir/b.txt", cx),
2772 p.open_local_buffer("/dir/a.txt", cx),
2773 )
2774 });
2775
2776 let buffer_a_1 = buffer_a_1.await.unwrap();
2777 let buffer_a_2 = buffer_a_2.await.unwrap();
2778 let buffer_b = buffer_b.await.unwrap();
2779 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2780 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2781
2782 // There is only one buffer per path.
2783 let buffer_a_id = buffer_a_1.id();
2784 assert_eq!(buffer_a_2.id(), buffer_a_id);
2785
2786 // Open the same path again while it is still open.
2787 drop(buffer_a_1);
2788 let buffer_a_3 = project
2789 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2790 .await
2791 .unwrap();
2792
2793 // There's still only one buffer per path.
2794 assert_eq!(buffer_a_3.id(), buffer_a_id);
2795}
2796
2797#[gpui::test]
2798async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2799 init_test(cx);
2800
2801 let fs = FakeFs::new(cx.background());
2802 fs.insert_tree(
2803 "/dir",
2804 json!({
2805 "file1": "abc",
2806 "file2": "def",
2807 "file3": "ghi",
2808 }),
2809 )
2810 .await;
2811
2812 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2813
2814 let buffer1 = project
2815 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2816 .await
2817 .unwrap();
2818 let events = Rc::new(RefCell::new(Vec::new()));
2819
    // Initially, the buffer isn't dirty.
2821 buffer1.update(cx, |buffer, cx| {
2822 cx.subscribe(&buffer1, {
2823 let events = events.clone();
2824 move |_, _, event, _| match event {
2825 BufferEvent::Operation(_) => {}
2826 _ => events.borrow_mut().push(event.clone()),
2827 }
2828 })
2829 .detach();
2830
2831 assert!(!buffer.is_dirty());
2832 assert!(events.borrow().is_empty());
2833
2834 buffer.edit([(1..2, "")], None, cx);
2835 });
2836
    // After the first edit, the buffer is dirty and emits a DirtyChanged event.
2838 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2840 assert!(buffer.is_dirty());
2841 assert_eq!(
2842 *events.borrow(),
2843 &[language::Event::Edited, language::Event::DirtyChanged]
2844 );
2845 events.borrow_mut().clear();
2846 buffer.did_save(
2847 buffer.version(),
2848 buffer.as_rope().fingerprint(),
2849 buffer.file().unwrap().mtime(),
2850 cx,
2851 );
2852 });
2853
    // After saving, the buffer is no longer dirty and emits a Saved event.
2855 buffer1.update(cx, |buffer, cx| {
2856 assert!(!buffer.is_dirty());
2857 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2858 events.borrow_mut().clear();
2859
2860 buffer.edit([(1..1, "B")], None, cx);
2861 buffer.edit([(2..2, "D")], None, cx);
2862 });
2863
    // After editing again, the buffer is dirty and emits another DirtyChanged event.
2865 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2867 assert!(buffer.is_dirty());
2868 assert_eq!(
2869 *events.borrow(),
2870 &[
2871 language::Event::Edited,
2872 language::Event::DirtyChanged,
2873 language::Event::Edited,
2874 ],
2875 );
2876 events.borrow_mut().clear();
2877
2878 // After restoring the buffer to its previously-saved state,
2879 // the buffer is not considered dirty anymore.
2880 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
2882 assert!(!buffer.is_dirty());
2883 });
2884
2885 assert_eq!(
2886 *events.borrow(),
2887 &[language::Event::Edited, language::Event::DirtyChanged]
2888 );
2889
2890 // When a file is deleted, the buffer is considered dirty.
2891 let events = Rc::new(RefCell::new(Vec::new()));
2892 let buffer2 = project
2893 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2894 .await
2895 .unwrap();
2896 buffer2.update(cx, |_, cx| {
2897 cx.subscribe(&buffer2, {
2898 let events = events.clone();
2899 move |_, _, event, _| events.borrow_mut().push(event.clone())
2900 })
2901 .detach();
2902 });
2903
2904 fs.remove_file("/dir/file2".as_ref(), Default::default())
2905 .await
2906 .unwrap();
2907 cx.foreground().run_until_parked();
2908 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2909 assert_eq!(
2910 *events.borrow(),
2911 &[
2912 language::Event::DirtyChanged,
2913 language::Event::FileHandleChanged
2914 ]
2915 );
2916
    // When a file is deleted while its buffer is already dirty, no DirtyChanged event is emitted.
2918 let events = Rc::new(RefCell::new(Vec::new()));
2919 let buffer3 = project
2920 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2921 .await
2922 .unwrap();
2923 buffer3.update(cx, |_, cx| {
2924 cx.subscribe(&buffer3, {
2925 let events = events.clone();
2926 move |_, _, event, _| events.borrow_mut().push(event.clone())
2927 })
2928 .detach();
2929 });
2930
2931 buffer3.update(cx, |buffer, cx| {
2932 buffer.edit([(0..0, "x")], None, cx);
2933 });
2934 events.borrow_mut().clear();
2935 fs.remove_file("/dir/file3".as_ref(), Default::default())
2936 .await
2937 .unwrap();
2938 cx.foreground().run_until_parked();
2939 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2940 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2941}
2942
2943#[gpui::test]
2944async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2945 init_test(cx);
2946
2947 let initial_contents = "aaa\nbbbbb\nc\n";
2948 let fs = FakeFs::new(cx.background());
2949 fs.insert_tree(
2950 "/dir",
2951 json!({
2952 "the-file": initial_contents,
2953 }),
2954 )
2955 .await;
2956 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2957 let buffer = project
2958 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2959 .await
2960 .unwrap();
2961
2962 let anchors = (0..3)
2963 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2964 .collect::<Vec<_>>();
2965
2966 // Change the file on disk, adding two new lines of text, and removing
2967 // one line.
2968 buffer.read_with(cx, |buffer, _| {
2969 assert!(!buffer.is_dirty());
2970 assert!(!buffer.has_conflict());
2971 });
2972 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2973 fs.save(
2974 "/dir/the-file".as_ref(),
2975 &new_contents.into(),
2976 LineEnding::Unix,
2977 )
2978 .await
2979 .unwrap();
2980
2981 // Because the buffer was not modified, it is reloaded from disk. Its
2982 // contents are edited according to the diff between the old and new
2983 // file contents.
2984 cx.foreground().run_until_parked();
2985 buffer.update(cx, |buffer, _| {
2986 assert_eq!(buffer.text(), new_contents);
2987 assert!(!buffer.is_dirty());
2988 assert!(!buffer.has_conflict());
2989
2990 let anchor_positions = anchors
2991 .iter()
2992 .map(|anchor| anchor.to_point(&*buffer))
2993 .collect::<Vec<_>>();
2994 assert_eq!(
2995 anchor_positions,
2996 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2997 );
2998 });
2999
3000 // Modify the buffer
3001 buffer.update(cx, |buffer, cx| {
3002 buffer.edit([(0..0, " ")], None, cx);
3003 assert!(buffer.is_dirty());
3004 assert!(!buffer.has_conflict());
3005 });
3006
3007 // Change the file on disk again, adding blank lines to the beginning.
3008 fs.save(
3009 "/dir/the-file".as_ref(),
3010 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3011 LineEnding::Unix,
3012 )
3013 .await
3014 .unwrap();
3015
3016 // Because the buffer is modified, it doesn't reload from disk, but is
3017 // marked as having a conflict.
3018 cx.foreground().run_until_parked();
3019 buffer.read_with(cx, |buffer, _| {
3020 assert!(buffer.has_conflict());
3021 });
3022}
3023
3024#[gpui::test]
3025async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3026 init_test(cx);
3027
3028 let fs = FakeFs::new(cx.background());
3029 fs.insert_tree(
3030 "/dir",
3031 json!({
3032 "file1": "a\nb\nc\n",
3033 "file2": "one\r\ntwo\r\nthree\r\n",
3034 }),
3035 )
3036 .await;
3037
3038 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3039 let buffer1 = project
3040 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3041 .await
3042 .unwrap();
3043 let buffer2 = project
3044 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3045 .await
3046 .unwrap();
3047
3048 buffer1.read_with(cx, |buffer, _| {
3049 assert_eq!(buffer.text(), "a\nb\nc\n");
3050 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3051 });
3052 buffer2.read_with(cx, |buffer, _| {
3053 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3054 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3055 });
3056
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3059 fs.save(
3060 "/dir/file1".as_ref(),
3061 &"aaa\nb\nc\n".into(),
3062 LineEnding::Windows,
3063 )
3064 .await
3065 .unwrap();
3066 cx.foreground().run_until_parked();
3067 buffer1.read_with(cx, |buffer, _| {
3068 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3069 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3070 });
3071
    // Save a file with Windows line endings. The file is written correctly.
3073 buffer2.update(cx, |buffer, cx| {
3074 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3075 });
3076 project
3077 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3078 .await
3079 .unwrap();
3080 assert_eq!(
3081 fs.load("/dir/file2".as_ref()).await.unwrap(),
3082 "one\r\ntwo\r\nthree\r\nfour\r\n",
3083 );
3084}
3085
3086#[gpui::test]
3087async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3088 init_test(cx);
3089
3090 let fs = FakeFs::new(cx.background());
3091 fs.insert_tree(
3092 "/the-dir",
3093 json!({
3094 "a.rs": "
3095 fn foo(mut v: Vec<usize>) {
3096 for x in &v {
3097 v.push(1);
3098 }
3099 }
3100 "
3101 .unindent(),
3102 }),
3103 )
3104 .await;
3105
3106 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3107 let buffer = project
3108 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3109 .await
3110 .unwrap();
3111
3112 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3113 let message = lsp::PublishDiagnosticsParams {
3114 uri: buffer_uri.clone(),
3115 diagnostics: vec![
3116 lsp::Diagnostic {
3117 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3118 severity: Some(DiagnosticSeverity::WARNING),
3119 message: "error 1".to_string(),
3120 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3121 location: lsp::Location {
3122 uri: buffer_uri.clone(),
3123 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3124 },
3125 message: "error 1 hint 1".to_string(),
3126 }]),
3127 ..Default::default()
3128 },
3129 lsp::Diagnostic {
3130 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3131 severity: Some(DiagnosticSeverity::HINT),
3132 message: "error 1 hint 1".to_string(),
3133 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3134 location: lsp::Location {
3135 uri: buffer_uri.clone(),
3136 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3137 },
3138 message: "original diagnostic".to_string(),
3139 }]),
3140 ..Default::default()
3141 },
3142 lsp::Diagnostic {
3143 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3144 severity: Some(DiagnosticSeverity::ERROR),
3145 message: "error 2".to_string(),
3146 related_information: Some(vec![
3147 lsp::DiagnosticRelatedInformation {
3148 location: lsp::Location {
3149 uri: buffer_uri.clone(),
3150 range: lsp::Range::new(
3151 lsp::Position::new(1, 13),
3152 lsp::Position::new(1, 15),
3153 ),
3154 },
3155 message: "error 2 hint 1".to_string(),
3156 },
3157 lsp::DiagnosticRelatedInformation {
3158 location: lsp::Location {
3159 uri: buffer_uri.clone(),
3160 range: lsp::Range::new(
3161 lsp::Position::new(1, 13),
3162 lsp::Position::new(1, 15),
3163 ),
3164 },
3165 message: "error 2 hint 2".to_string(),
3166 },
3167 ]),
3168 ..Default::default()
3169 },
3170 lsp::Diagnostic {
3171 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3172 severity: Some(DiagnosticSeverity::HINT),
3173 message: "error 2 hint 1".to_string(),
3174 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3175 location: lsp::Location {
3176 uri: buffer_uri.clone(),
3177 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3178 },
3179 message: "original diagnostic".to_string(),
3180 }]),
3181 ..Default::default()
3182 },
3183 lsp::Diagnostic {
3184 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3185 severity: Some(DiagnosticSeverity::HINT),
3186 message: "error 2 hint 2".to_string(),
3187 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3188 location: lsp::Location {
3189 uri: buffer_uri,
3190 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3191 },
3192 message: "original diagnostic".to_string(),
3193 }]),
3194 ..Default::default()
3195 },
3196 ],
3197 version: None,
3198 };
3199
3200 project
3201 .update(cx, |p, cx| {
3202 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3203 })
3204 .unwrap();
3205 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3206
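    // The diagnostics and their related information are organized into groups: each
    // primary diagnostic shares a group id with its hint entries.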
3207 assert_eq!(
3208 buffer
3209 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3210 .collect::<Vec<_>>(),
3211 &[
3212 DiagnosticEntry {
3213 range: Point::new(1, 8)..Point::new(1, 9),
3214 diagnostic: Diagnostic {
3215 severity: DiagnosticSeverity::WARNING,
3216 message: "error 1".to_string(),
3217 group_id: 1,
3218 is_primary: true,
3219 ..Default::default()
3220 }
3221 },
3222 DiagnosticEntry {
3223 range: Point::new(1, 8)..Point::new(1, 9),
3224 diagnostic: Diagnostic {
3225 severity: DiagnosticSeverity::HINT,
3226 message: "error 1 hint 1".to_string(),
3227 group_id: 1,
3228 is_primary: false,
3229 ..Default::default()
3230 }
3231 },
3232 DiagnosticEntry {
3233 range: Point::new(1, 13)..Point::new(1, 15),
3234 diagnostic: Diagnostic {
3235 severity: DiagnosticSeverity::HINT,
3236 message: "error 2 hint 1".to_string(),
3237 group_id: 0,
3238 is_primary: false,
3239 ..Default::default()
3240 }
3241 },
3242 DiagnosticEntry {
3243 range: Point::new(1, 13)..Point::new(1, 15),
3244 diagnostic: Diagnostic {
3245 severity: DiagnosticSeverity::HINT,
3246 message: "error 2 hint 2".to_string(),
3247 group_id: 0,
3248 is_primary: false,
3249 ..Default::default()
3250 }
3251 },
3252 DiagnosticEntry {
3253 range: Point::new(2, 8)..Point::new(2, 17),
3254 diagnostic: Diagnostic {
3255 severity: DiagnosticSeverity::ERROR,
3256 message: "error 2".to_string(),
3257 group_id: 0,
3258 is_primary: true,
3259 ..Default::default()
3260 }
3261 }
3262 ]
3263 );
3264
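    // Each diagnostic group can also be fetched by its group id.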
3265 assert_eq!(
3266 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3267 &[
3268 DiagnosticEntry {
3269 range: Point::new(1, 13)..Point::new(1, 15),
3270 diagnostic: Diagnostic {
3271 severity: DiagnosticSeverity::HINT,
3272 message: "error 2 hint 1".to_string(),
3273 group_id: 0,
3274 is_primary: false,
3275 ..Default::default()
3276 }
3277 },
3278 DiagnosticEntry {
3279 range: Point::new(1, 13)..Point::new(1, 15),
3280 diagnostic: Diagnostic {
3281 severity: DiagnosticSeverity::HINT,
3282 message: "error 2 hint 2".to_string(),
3283 group_id: 0,
3284 is_primary: false,
3285 ..Default::default()
3286 }
3287 },
3288 DiagnosticEntry {
3289 range: Point::new(2, 8)..Point::new(2, 17),
3290 diagnostic: Diagnostic {
3291 severity: DiagnosticSeverity::ERROR,
3292 message: "error 2".to_string(),
3293 group_id: 0,
3294 is_primary: true,
3295 ..Default::default()
3296 }
3297 }
3298 ]
3299 );
3300
3301 assert_eq!(
3302 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3303 &[
3304 DiagnosticEntry {
3305 range: Point::new(1, 8)..Point::new(1, 9),
3306 diagnostic: Diagnostic {
3307 severity: DiagnosticSeverity::WARNING,
3308 message: "error 1".to_string(),
3309 group_id: 1,
3310 is_primary: true,
3311 ..Default::default()
3312 }
3313 },
3314 DiagnosticEntry {
3315 range: Point::new(1, 8)..Point::new(1, 9),
3316 diagnostic: Diagnostic {
3317 severity: DiagnosticSeverity::HINT,
3318 message: "error 1 hint 1".to_string(),
3319 group_id: 1,
3320 is_primary: false,
3321 ..Default::default()
3322 }
3323 },
3324 ]
3325 );
3326}
3327
3328#[gpui::test]
3329async fn test_rename(cx: &mut gpui::TestAppContext) {
3330 init_test(cx);
3331
3332 let mut language = Language::new(
3333 LanguageConfig {
3334 name: "Rust".into(),
3335 path_suffixes: vec!["rs".to_string()],
3336 ..Default::default()
3337 },
3338 Some(tree_sitter_rust::language()),
3339 );
3340 let mut fake_servers = language
3341 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3342 capabilities: lsp::ServerCapabilities {
3343 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3344 prepare_provider: Some(true),
3345 work_done_progress_options: Default::default(),
3346 })),
3347 ..Default::default()
3348 },
3349 ..Default::default()
3350 }))
3351 .await;
3352
3353 let fs = FakeFs::new(cx.background());
3354 fs.insert_tree(
3355 "/dir",
3356 json!({
3357 "one.rs": "const ONE: usize = 1;",
3358 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3359 }),
3360 )
3361 .await;
3362
3363 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3364 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3365 let buffer = project
3366 .update(cx, |project, cx| {
3367 project.open_local_buffer("/dir/one.rs", cx)
3368 })
3369 .await
3370 .unwrap();
3371
3372 let fake_server = fake_servers.next().await.unwrap();
3373
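    // Prepare a rename at the cursor position. The server responds with the range of
    // the symbol that can be renamed.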
3374 let response = project.update(cx, |project, cx| {
3375 project.prepare_rename(buffer.clone(), 7, cx)
3376 });
3377 fake_server
3378 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3379 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3380 assert_eq!(params.position, lsp::Position::new(0, 7));
3381 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3382 lsp::Position::new(0, 6),
3383 lsp::Position::new(0, 9),
3384 ))))
3385 })
3386 .next()
3387 .await
3388 .unwrap();
3389 let range = response.await.unwrap().unwrap();
3390 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3391 assert_eq!(range, 6..9);
3392
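    // Perform the rename. The server's workspace edit touches two files, so the
    // resulting transaction contains two buffers.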
3393 let response = project.update(cx, |project, cx| {
3394 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3395 });
3396 fake_server
3397 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3398 assert_eq!(
3399 params.text_document_position.text_document.uri.as_str(),
3400 "file:///dir/one.rs"
3401 );
3402 assert_eq!(
3403 params.text_document_position.position,
3404 lsp::Position::new(0, 7)
3405 );
3406 assert_eq!(params.new_name, "THREE");
3407 Ok(Some(lsp::WorkspaceEdit {
3408 changes: Some(
3409 [
3410 (
3411 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3412 vec![lsp::TextEdit::new(
3413 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3414 "THREE".to_string(),
3415 )],
3416 ),
3417 (
3418 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3419 vec![
3420 lsp::TextEdit::new(
3421 lsp::Range::new(
3422 lsp::Position::new(0, 24),
3423 lsp::Position::new(0, 27),
3424 ),
3425 "THREE".to_string(),
3426 ),
3427 lsp::TextEdit::new(
3428 lsp::Range::new(
3429 lsp::Position::new(0, 35),
3430 lsp::Position::new(0, 38),
3431 ),
3432 "THREE".to_string(),
3433 ),
3434 ],
3435 ),
3436 ]
3437 .into_iter()
3438 .collect(),
3439 ),
3440 ..Default::default()
3441 }))
3442 })
3443 .next()
3444 .await
3445 .unwrap();
3446 let mut transaction = response.await.unwrap().0;
3447 assert_eq!(transaction.len(), 2);
3448 assert_eq!(
3449 transaction
3450 .remove_entry(&buffer)
3451 .unwrap()
3452 .0
3453 .read_with(cx, |buffer, _| buffer.text()),
3454 "const THREE: usize = 1;"
3455 );
3456 assert_eq!(
3457 transaction
3458 .into_keys()
3459 .next()
3460 .unwrap()
3461 .read_with(cx, |buffer, _| buffer.text()),
3462 "const TWO: usize = one::THREE + one::THREE;"
3463 );
3464}
3465
3466#[gpui::test]
3467async fn test_search(cx: &mut gpui::TestAppContext) {
3468 init_test(cx);
3469
3470 let fs = FakeFs::new(cx.background());
3471 fs.insert_tree(
3472 "/dir",
3473 json!({
3474 "one.rs": "const ONE: usize = 1;",
3475 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3476 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3477 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3478 }),
3479 )
3480 .await;
3481 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3482 assert_eq!(
3483 search(
3484 &project,
3485 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3486 cx
3487 )
3488 .await
3489 .unwrap(),
3490 HashMap::from_iter([
3491 ("two.rs".to_string(), vec![6..9]),
3492 ("three.rs".to_string(), vec![37..40])
3493 ])
3494 );
3495
3496 let buffer_4 = project
3497 .update(cx, |project, cx| {
3498 project.open_local_buffer("/dir/four.rs", cx)
3499 })
3500 .await
3501 .unwrap();
3502 buffer_4.update(cx, |buffer, cx| {
3503 let text = "two::TWO";
3504 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3505 });
3506
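    // The search results reflect the unsaved edits in the open buffer.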
3507 assert_eq!(
3508 search(
3509 &project,
3510 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3511 cx
3512 )
3513 .await
3514 .unwrap(),
3515 HashMap::from_iter([
3516 ("two.rs".to_string(), vec![6..9]),
3517 ("three.rs".to_string(), vec![37..40]),
3518 ("four.rs".to_string(), vec![25..28, 36..39])
3519 ])
3520 );
3521}
3522
3523#[gpui::test]
3524async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3525 init_test(cx);
3526
3527 let search_query = "file";
3528
3529 let fs = FakeFs::new(cx.background());
3530 fs.insert_tree(
3531 "/dir",
3532 json!({
3533 "one.rs": r#"// Rust file one"#,
3534 "one.ts": r#"// TypeScript file one"#,
3535 "two.rs": r#"// Rust file two"#,
3536 "two.ts": r#"// TypeScript file two"#,
3537 }),
3538 )
3539 .await;
3540 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3541
3542 assert!(
3543 search(
3544 &project,
3545 SearchQuery::text(
3546 search_query,
3547 false,
3548 true,
3549 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3550 Vec::new()
3551 ),
3552 cx
3553 )
3554 .await
3555 .unwrap()
3556 .is_empty(),
3557 "If no inclusions match, no files should be returned"
3558 );
3559
3560 assert_eq!(
3561 search(
3562 &project,
3563 SearchQuery::text(
3564 search_query,
3565 false,
3566 true,
3567 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3568 Vec::new()
3569 ),
3570 cx
3571 )
3572 .await
3573 .unwrap(),
3574 HashMap::from_iter([
3575 ("one.rs".to_string(), vec![8..12]),
3576 ("two.rs".to_string(), vec![8..12]),
3577 ]),
        "Rust-only search should give only Rust files"
3579 );
3580
3581 assert_eq!(
3582 search(
3583 &project,
3584 SearchQuery::text(
3585 search_query,
3586 false,
3587 true,
3588 vec![
3589 Glob::new("*.ts").unwrap().compile_matcher(),
3590 Glob::new("*.odd").unwrap().compile_matcher(),
3591 ],
3592 Vec::new()
3593 ),
3594 cx
3595 )
3596 .await
3597 .unwrap(),
3598 HashMap::from_iter([
3599 ("one.ts".to_string(), vec![14..18]),
3600 ("two.ts".to_string(), vec![14..18]),
3601 ]),
        "TypeScript-only search should give only TypeScript files, even if other inclusions don't match anything"
3603 );
3604
3605 assert_eq!(
3606 search(
3607 &project,
3608 SearchQuery::text(
3609 search_query,
3610 false,
3611 true,
3612 vec![
3613 Glob::new("*.rs").unwrap().compile_matcher(),
3614 Glob::new("*.ts").unwrap().compile_matcher(),
3615 Glob::new("*.odd").unwrap().compile_matcher(),
3616 ],
3617 Vec::new()
3618 ),
3619 cx
3620 )
3621 .await
3622 .unwrap(),
3623 HashMap::from_iter([
3624 ("one.rs".to_string(), vec![8..12]),
3625 ("one.ts".to_string(), vec![14..18]),
3626 ("two.rs".to_string(), vec![8..12]),
3627 ("two.ts".to_string(), vec![14..18]),
3628 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3630 );
3631}
3632
3633#[gpui::test]
3634async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3635 init_test(cx);
3636
3637 let search_query = "file";
3638
3639 let fs = FakeFs::new(cx.background());
3640 fs.insert_tree(
3641 "/dir",
3642 json!({
3643 "one.rs": r#"// Rust file one"#,
3644 "one.ts": r#"// TypeScript file one"#,
3645 "two.rs": r#"// Rust file two"#,
3646 "two.ts": r#"// TypeScript file two"#,
3647 }),
3648 )
3649 .await;
3650 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3651
3652 assert_eq!(
3653 search(
3654 &project,
3655 SearchQuery::text(
3656 search_query,
3657 false,
3658 true,
3659 Vec::new(),
3660 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3661 ),
3662 cx
3663 )
3664 .await
3665 .unwrap(),
3666 HashMap::from_iter([
3667 ("one.rs".to_string(), vec![8..12]),
3668 ("one.ts".to_string(), vec![14..18]),
3669 ("two.rs".to_string(), vec![8..12]),
3670 ("two.ts".to_string(), vec![14..18]),
3671 ]),
3672 "If no exclusions match, all files should be returned"
3673 );
3674
3675 assert_eq!(
3676 search(
3677 &project,
3678 SearchQuery::text(
3679 search_query,
3680 false,
3681 true,
3682 Vec::new(),
3683 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3684 ),
3685 cx
3686 )
3687 .await
3688 .unwrap(),
3689 HashMap::from_iter([
3690 ("one.ts".to_string(), vec![14..18]),
3691 ("two.ts".to_string(), vec![14..18]),
3692 ]),
3693 "Rust exclusion search should give only TypeScript files"
3694 );
3695
3696 assert_eq!(
3697 search(
3698 &project,
3699 SearchQuery::text(
3700 search_query,
3701 false,
3702 true,
3703 Vec::new(),
3704 vec![
3705 Glob::new("*.ts").unwrap().compile_matcher(),
3706 Glob::new("*.odd").unwrap().compile_matcher(),
3707 ],
3708 ),
3709 cx
3710 )
3711 .await
3712 .unwrap(),
3713 HashMap::from_iter([
3714 ("one.rs".to_string(), vec![8..12]),
3715 ("two.rs".to_string(), vec![8..12]),
3716 ]),
3717 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3718 );
3719
3720 assert!(
3721 search(
3722 &project,
3723 SearchQuery::text(
3724 search_query,
3725 false,
3726 true,
3727 Vec::new(),
3728 vec![
3729 Glob::new("*.rs").unwrap().compile_matcher(),
3730 Glob::new("*.ts").unwrap().compile_matcher(),
3731 Glob::new("*.odd").unwrap().compile_matcher(),
3732 ],
3733 ),
3734 cx
3735 )
3736 .await
        .unwrap()
        .is_empty(),
        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
3739 );
3740}
3741
3742#[gpui::test]
3743async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3744 init_test(cx);
3745
3746 let search_query = "file";
3747
3748 let fs = FakeFs::new(cx.background());
3749 fs.insert_tree(
3750 "/dir",
3751 json!({
3752 "one.rs": r#"// Rust file one"#,
3753 "one.ts": r#"// TypeScript file one"#,
3754 "two.rs": r#"// Rust file two"#,
3755 "two.ts": r#"// TypeScript file two"#,
3756 }),
3757 )
3758 .await;
3759 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3760
3761 assert!(
3762 search(
3763 &project,
3764 SearchQuery::text(
3765 search_query,
3766 false,
3767 true,
3768 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3769 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3770 ),
3771 cx
3772 )
3773 .await
3774 .unwrap()
3775 .is_empty(),
        "If no inclusions or exclusions match, no files should be returned"
3777 );
3778
3779 assert!(
3780 search(
3781 &project,
3782 SearchQuery::text(
3783 search_query,
3784 false,
3785 true,
3786 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3787 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3788 ),
3789 cx
3790 )
3791 .await
3792 .unwrap()
3793 .is_empty(),
        "If the same files match both the inclusions and the exclusions, the exclusions should win and no files should be returned."
3795 );
3796
3797 assert!(
3798 search(
3799 &project,
3800 SearchQuery::text(
3801 search_query,
3802 false,
3803 true,
3804 vec![
3805 Glob::new("*.ts").unwrap().compile_matcher(),
3806 Glob::new("*.odd").unwrap().compile_matcher()
3807 ],
3808 vec![
3809 Glob::new("*.ts").unwrap().compile_matcher(),
3810 Glob::new("*.odd").unwrap().compile_matcher()
3811 ],
3812 ),
3813 cx
3814 )
3815 .await
3816 .unwrap()
3817 .is_empty(),
3818 "Non-matching inclusions and exclusions should not change that."
3819 );
3820
3821 assert_eq!(
3822 search(
3823 &project,
3824 SearchQuery::text(
3825 search_query,
3826 false,
3827 true,
3828 vec![
3829 Glob::new("*.ts").unwrap().compile_matcher(),
3830 Glob::new("*.odd").unwrap().compile_matcher()
3831 ],
3832 vec![
3833 Glob::new("*.rs").unwrap().compile_matcher(),
3834 Glob::new("*.odd").unwrap().compile_matcher()
3835 ],
3836 ),
3837 cx
3838 )
3839 .await
3840 .unwrap(),
3841 HashMap::from_iter([
3842 ("one.ts".to_string(), vec![14..18]),
3843 ("two.ts".to_string(), vec![14..18]),
3844 ]),
3845 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3846 );
3847}
3848
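// Runs a project-wide search and returns the results as a map from file path to the
// offset ranges of the matches within that file.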
3849async fn search(
3850 project: &ModelHandle<Project>,
3851 query: SearchQuery,
3852 cx: &mut gpui::TestAppContext,
3853) -> Result<HashMap<String, Vec<Range<usize>>>> {
3854 let results = project
3855 .update(cx, |project, cx| project.search(query, cx))
3856 .await?;
3857
3858 Ok(results
3859 .into_iter()
3860 .map(|(buffer, ranges)| {
3861 buffer.read_with(cx, |buffer, _| {
3862 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3863 let ranges = ranges
3864 .into_iter()
3865 .map(|range| range.to_offset(buffer))
3866 .collect::<Vec<_>>();
3867 (path, ranges)
3868 })
3869 })
3870 .collect())
3871}
3872
3873fn init_test(cx: &mut gpui::TestAppContext) {
3874 cx.foreground().forbid_parking();
3875
3876 cx.update(|cx| {
3877 cx.set_global(SettingsStore::test(cx));
3878 language::init(cx);
3879 Project::init_settings(cx);
3880 });
3881}