1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use globset::Glob;
5use gpui::{executor::Deterministic, test::subscribe, AppContext};
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 init_test(cx);
30 cx.foreground().allow_parking();
31
32 let dir = temp_tree(json!({
33 "root": {
34 "apple": "",
35 "banana": {
36 "carrot": {
37 "date": "",
38 "endive": "",
39 }
40 },
41 "fennel": {
42 "grape": "",
43 }
44 }
45 }));
46
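    // Create a symlink to the root directory, plus a symlink inside the tree
    // (`finnochio`) that points at a sibling directory (`fennel`).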
47 let root_link_path = dir.path().join("root_link");
48 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
49 unix::fs::symlink(
50 &dir.path().join("root/fennel"),
51 &dir.path().join("root/finnochio"),
52 )
53 .unwrap();
54
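    // Open the project through the root symlink. The worktree should contain all five
    // files, and paths reached through the `finnochio` symlink should resolve to the
    // same inodes as the `fennel` directory it points to.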
55 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
56 project.read_with(cx, |project, cx| {
57 let tree = project.worktrees(cx).next().unwrap().read(cx);
58 assert_eq!(tree.file_count(), 5);
59 assert_eq!(
60 tree.inode_for_path("fennel/grape"),
61 tree.inode_for_path("finnochio/grape")
62 );
63 });
64}
65
66#[gpui::test]
67async fn test_managing_project_specific_settings(
68 deterministic: Arc<Deterministic>,
69 cx: &mut gpui::TestAppContext,
70) {
71 init_test(cx);
72
73 let fs = FakeFs::new(cx.background());
74 fs.insert_tree(
75 "/the-root",
76 json!({
77 ".zed": {
78 "settings.json": r#"{ "tab_size": 8 }"#
79 },
80 "a": {
81 "a.rs": "fn a() {\n A\n}"
82 },
83 "b": {
84 ".zed": {
85 "settings.json": r#"{ "tab_size": 2 }"#
86 },
87 "b.rs": "fn b() {\n B\n}"
88 }
89 }),
90 )
91 .await;
92
93 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
94 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
95
96 deterministic.run_until_parked();
97 cx.read(|cx| {
98 let tree = worktree.read(cx);
99
100 let settings_a = language_settings(
101 None,
102 Some(
103 &(File::for_entry(
104 tree.entry_for_path("a/a.rs").unwrap().clone(),
105 worktree.clone(),
106 ) as _),
107 ),
108 cx,
109 );
110 let settings_b = language_settings(
111 None,
112 Some(
113 &(File::for_entry(
114 tree.entry_for_path("b/b.rs").unwrap().clone(),
115 worktree.clone(),
116 ) as _),
117 ),
118 cx,
119 );
120
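        // `a/a.rs` only sees the root `.zed/settings.json`, while `b/b.rs` picks up the
        // nested override in `b/.zed/settings.json`.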
121 assert_eq!(settings_a.tab_size.get(), 8);
122 assert_eq!(settings_b.tab_size.get(), 2);
123 });
124}
125
126#[gpui::test]
127async fn test_managing_language_servers(
128 deterministic: Arc<Deterministic>,
129 cx: &mut gpui::TestAppContext,
130) {
131 init_test(cx);
132
133 let mut rust_language = Language::new(
134 LanguageConfig {
135 name: "Rust".into(),
136 path_suffixes: vec!["rs".to_string()],
137 ..Default::default()
138 },
139 Some(tree_sitter_rust::language()),
140 );
141 let mut json_language = Language::new(
142 LanguageConfig {
143 name: "JSON".into(),
144 path_suffixes: vec!["json".to_string()],
145 ..Default::default()
146 },
147 None,
148 );
149 let mut fake_rust_servers = rust_language
150 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
151 name: "the-rust-language-server",
152 capabilities: lsp::ServerCapabilities {
153 completion_provider: Some(lsp::CompletionOptions {
154 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
155 ..Default::default()
156 }),
157 ..Default::default()
158 },
159 ..Default::default()
160 }))
161 .await;
162 let mut fake_json_servers = json_language
163 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
164 name: "the-json-language-server",
165 capabilities: lsp::ServerCapabilities {
166 completion_provider: Some(lsp::CompletionOptions {
167 trigger_characters: Some(vec![":".to_string()]),
168 ..Default::default()
169 }),
170 ..Default::default()
171 },
172 ..Default::default()
173 }))
174 .await;
175
176 let fs = FakeFs::new(cx.background());
177 fs.insert_tree(
178 "/the-root",
179 json!({
180 "test.rs": "const A: i32 = 1;",
181 "test2.rs": "",
182 "Cargo.toml": "a = 1",
183 "package.json": "{\"a\": 1}",
184 }),
185 )
186 .await;
187
188 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
189
190 // Open a buffer without an associated language server.
191 let toml_buffer = project
192 .update(cx, |project, cx| {
193 project.open_local_buffer("/the-root/Cargo.toml", cx)
194 })
195 .await
196 .unwrap();
197
198 // Open a buffer with an associated language server before the language for it has been loaded.
199 let rust_buffer = project
200 .update(cx, |project, cx| {
201 project.open_local_buffer("/the-root/test.rs", cx)
202 })
203 .await
204 .unwrap();
205 rust_buffer.read_with(cx, |buffer, _| {
206 assert_eq!(buffer.language().map(|l| l.name()), None);
207 });
208
209 // Now we add the languages to the project, and ensure they get assigned to all
210 // the relevant open buffers.
211 project.update(cx, |project, _| {
212 project.languages.add(Arc::new(json_language));
213 project.languages.add(Arc::new(rust_language));
214 });
215 deterministic.run_until_parked();
216 rust_buffer.read_with(cx, |buffer, _| {
217 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
218 });
219
220 // A server is started up, and it is notified about Rust files.
221 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
222 assert_eq!(
223 fake_rust_server
224 .receive_notification::<lsp::notification::DidOpenTextDocument>()
225 .await
226 .text_document,
227 lsp::TextDocumentItem {
228 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
229 version: 0,
230 text: "const A: i32 = 1;".to_string(),
231 language_id: Default::default()
232 }
233 );
234
235 // The buffer is configured based on the language server's capabilities.
236 rust_buffer.read_with(cx, |buffer, _| {
237 assert_eq!(
238 buffer.completion_triggers(),
239 &[".".to_string(), "::".to_string()]
240 );
241 });
242 toml_buffer.read_with(cx, |buffer, _| {
243 assert!(buffer.completion_triggers().is_empty());
244 });
245
246 // Edit a buffer. The changes are reported to the language server.
247 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
248 assert_eq!(
249 fake_rust_server
250 .receive_notification::<lsp::notification::DidChangeTextDocument>()
251 .await
252 .text_document,
253 lsp::VersionedTextDocumentIdentifier::new(
254 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
255 1
256 )
257 );
258
259 // Open a third buffer with a different associated language server.
260 let json_buffer = project
261 .update(cx, |project, cx| {
262 project.open_local_buffer("/the-root/package.json", cx)
263 })
264 .await
265 .unwrap();
266
    // A JSON language server is started and is notified only about the JSON buffer.
268 let mut fake_json_server = fake_json_servers.next().await.unwrap();
269 assert_eq!(
270 fake_json_server
271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
272 .await
273 .text_document,
274 lsp::TextDocumentItem {
275 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
276 version: 0,
277 text: "{\"a\": 1}".to_string(),
278 language_id: Default::default()
279 }
280 );
281
282 // This buffer is configured based on the second language server's
283 // capabilities.
284 json_buffer.read_with(cx, |buffer, _| {
285 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
286 });
287
288 // When opening another buffer whose language server is already running,
289 // it is also configured based on the existing language server's capabilities.
290 let rust_buffer2 = project
291 .update(cx, |project, cx| {
292 project.open_local_buffer("/the-root/test2.rs", cx)
293 })
294 .await
295 .unwrap();
296 rust_buffer2.read_with(cx, |buffer, _| {
297 assert_eq!(
298 buffer.completion_triggers(),
299 &[".".to_string(), "::".to_string()]
300 );
301 });
302
303 // Changes are reported only to servers matching the buffer's language.
304 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
305 rust_buffer2.update(cx, |buffer, cx| {
306 buffer.edit([(0..0, "let x = 1;")], None, cx)
307 });
308 assert_eq!(
309 fake_rust_server
310 .receive_notification::<lsp::notification::DidChangeTextDocument>()
311 .await
312 .text_document,
313 lsp::VersionedTextDocumentIdentifier::new(
314 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
315 1
316 )
317 );
318
319 // Save notifications are reported to all servers.
320 project
321 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
322 .await
323 .unwrap();
324 assert_eq!(
325 fake_rust_server
326 .receive_notification::<lsp::notification::DidSaveTextDocument>()
327 .await
328 .text_document,
329 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
330 );
331 assert_eq!(
332 fake_json_server
333 .receive_notification::<lsp::notification::DidSaveTextDocument>()
334 .await
335 .text_document,
336 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
337 );
338
339 // Renames are reported only to servers matching the buffer's language.
340 fs.rename(
341 Path::new("/the-root/test2.rs"),
342 Path::new("/the-root/test3.rs"),
343 Default::default(),
344 )
345 .await
346 .unwrap();
347 assert_eq!(
348 fake_rust_server
349 .receive_notification::<lsp::notification::DidCloseTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
353 );
354 assert_eq!(
355 fake_rust_server
356 .receive_notification::<lsp::notification::DidOpenTextDocument>()
357 .await
358 .text_document,
359 lsp::TextDocumentItem {
360 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
361 version: 0,
362 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
363 language_id: Default::default()
364 },
365 );
366
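    // Attach a placeholder diagnostic to the renamed buffer so that we can verify below
    // that diagnostics are cleared when the file's language changes.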
367 rust_buffer2.update(cx, |buffer, cx| {
368 buffer.update_diagnostics(
369 LanguageServerId(0),
370 DiagnosticSet::from_sorted_entries(
371 vec![DiagnosticEntry {
372 diagnostic: Default::default(),
373 range: Anchor::MIN..Anchor::MAX,
374 }],
375 &buffer.snapshot(),
376 ),
377 cx,
378 );
379 assert_eq!(
380 buffer
381 .snapshot()
382 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
383 .count(),
384 1
385 );
386 });
387
    // When the rename changes the file's extension, the buffer is closed on the old
    // language server and opened on the new one.
390 fs.rename(
391 Path::new("/the-root/test3.rs"),
392 Path::new("/the-root/test3.json"),
393 Default::default(),
394 )
395 .await
396 .unwrap();
397 assert_eq!(
398 fake_rust_server
399 .receive_notification::<lsp::notification::DidCloseTextDocument>()
400 .await
401 .text_document,
402 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
403 );
404 assert_eq!(
405 fake_json_server
406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
407 .await
408 .text_document,
409 lsp::TextDocumentItem {
410 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
411 version: 0,
412 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
413 language_id: Default::default()
414 },
415 );
416
    // The diagnostics are cleared because the buffer's language has changed.
418 rust_buffer2.read_with(cx, |buffer, _| {
419 assert_eq!(
420 buffer
421 .snapshot()
422 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
423 .count(),
424 0
425 );
426 });
427
    // The renamed file's version resets after switching language servers.
429 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
430 assert_eq!(
431 fake_json_server
432 .receive_notification::<lsp::notification::DidChangeTextDocument>()
433 .await
434 .text_document,
435 lsp::VersionedTextDocumentIdentifier::new(
436 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
437 1
438 )
439 );
440
    // Restart the language servers. Both servers should receive a shutdown request.
442 project.update(cx, |project, cx| {
443 project.restart_language_servers_for_buffers(
444 vec![rust_buffer.clone(), json_buffer.clone()],
445 cx,
446 );
447 });
448
449 let mut rust_shutdown_requests = fake_rust_server
450 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
451 let mut json_shutdown_requests = fake_json_server
452 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
453 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
454
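    // Once both servers acknowledge the shutdown, replacement servers are started.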
455 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
456 let mut fake_json_server = fake_json_servers.next().await.unwrap();
457
    // Ensure the Rust document is reopened in the new Rust language server
459 assert_eq!(
460 fake_rust_server
461 .receive_notification::<lsp::notification::DidOpenTextDocument>()
462 .await
463 .text_document,
464 lsp::TextDocumentItem {
465 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
466 version: 0,
467 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
468 language_id: Default::default()
469 }
470 );
471
    // Ensure both JSON documents are reopened in the new JSON language server
473 assert_set_eq!(
474 [
475 fake_json_server
476 .receive_notification::<lsp::notification::DidOpenTextDocument>()
477 .await
478 .text_document,
479 fake_json_server
480 .receive_notification::<lsp::notification::DidOpenTextDocument>()
481 .await
482 .text_document,
483 ],
484 [
485 lsp::TextDocumentItem {
486 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
487 version: 0,
488 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
489 language_id: Default::default()
490 },
491 lsp::TextDocumentItem {
492 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
493 version: 0,
494 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
495 language_id: Default::default()
496 }
497 ]
498 );
499
500 // Close notifications are reported only to servers matching the buffer's language.
501 cx.update(|_| drop(json_buffer));
502 let close_message = lsp::DidCloseTextDocumentParams {
503 text_document: lsp::TextDocumentIdentifier::new(
504 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
505 ),
506 };
507 assert_eq!(
508 fake_json_server
509 .receive_notification::<lsp::notification::DidCloseTextDocument>()
510 .await,
511 close_message,
512 );
513}
514
515#[gpui::test]
516async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
517 init_test(cx);
518
519 let mut language = Language::new(
520 LanguageConfig {
521 name: "Rust".into(),
522 path_suffixes: vec!["rs".to_string()],
523 ..Default::default()
524 },
525 Some(tree_sitter_rust::language()),
526 );
527 let mut fake_servers = language
528 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
529 name: "the-language-server",
530 ..Default::default()
531 }))
532 .await;
533
534 let fs = FakeFs::new(cx.background());
535 fs.insert_tree(
536 "/the-root",
537 json!({
538 "a.rs": "",
539 "b.rs": "",
540 }),
541 )
542 .await;
543
544 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
545 project.update(cx, |project, _| {
546 project.languages.add(Arc::new(language));
547 });
548 cx.foreground().run_until_parked();
549
550 // Start the language server by opening a buffer with a compatible file extension.
551 let _buffer = project
552 .update(cx, |project, cx| {
553 project.open_local_buffer("/the-root/a.rs", cx)
554 })
555 .await
556 .unwrap();
557
558 // Keep track of the FS events reported to the language server.
559 let fake_server = fake_servers.next().await.unwrap();
560 let file_changes = Arc::new(Mutex::new(Vec::new()));
561 fake_server
562 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
563 registrations: vec![lsp::Registration {
564 id: Default::default(),
565 method: "workspace/didChangeWatchedFiles".to_string(),
566 register_options: serde_json::to_value(
567 lsp::DidChangeWatchedFilesRegistrationOptions {
568 watchers: vec![lsp::FileSystemWatcher {
569 glob_pattern: lsp::GlobPattern::String(
570 "/the-root/*.{rs,c}".to_string(),
571 ),
572 kind: None,
573 }],
574 },
575 )
576 .ok(),
577 }],
578 })
579 .await
580 .unwrap();
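    // Accumulate the watched-file events reported to the server, keeping them sorted by
    // URI so that the assertion below is order-independent.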
581 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
582 let file_changes = file_changes.clone();
583 move |params, _| {
584 let mut file_changes = file_changes.lock();
585 file_changes.extend(params.changes);
586 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
587 }
588 });
589
590 cx.foreground().run_until_parked();
591 assert_eq!(file_changes.lock().len(), 0);
592
593 // Perform some file system mutations, two of which match the watched patterns,
594 // and one of which does not.
595 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
596 .await
597 .unwrap();
598 fs.create_file("/the-root/d.txt".as_ref(), Default::default())
599 .await
600 .unwrap();
601 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
602 .await
603 .unwrap();
604
605 // The language server receives events for the FS mutations that match its watch patterns.
606 cx.foreground().run_until_parked();
607 assert_eq!(
608 &*file_changes.lock(),
609 &[
610 lsp::FileEvent {
611 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
612 typ: lsp::FileChangeType::DELETED,
613 },
614 lsp::FileEvent {
615 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
616 typ: lsp::FileChangeType::CREATED,
617 },
618 ]
619 );
620}
621
622#[gpui::test]
623async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
624 init_test(cx);
625
626 let fs = FakeFs::new(cx.background());
627 fs.insert_tree(
628 "/dir",
629 json!({
630 "a.rs": "let a = 1;",
631 "b.rs": "let b = 2;"
632 }),
633 )
634 .await;
635
636 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
637
638 let buffer_a = project
639 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
640 .await
641 .unwrap();
642 let buffer_b = project
643 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
644 .await
645 .unwrap();
646
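    // Publish one diagnostic for each of the two single-file worktrees.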
647 project.update(cx, |project, cx| {
648 project
649 .update_diagnostics(
650 LanguageServerId(0),
651 lsp::PublishDiagnosticsParams {
652 uri: Url::from_file_path("/dir/a.rs").unwrap(),
653 version: None,
654 diagnostics: vec![lsp::Diagnostic {
655 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
656 severity: Some(lsp::DiagnosticSeverity::ERROR),
657 message: "error 1".to_string(),
658 ..Default::default()
659 }],
660 },
661 &[],
662 cx,
663 )
664 .unwrap();
665 project
666 .update_diagnostics(
667 LanguageServerId(0),
668 lsp::PublishDiagnosticsParams {
669 uri: Url::from_file_path("/dir/b.rs").unwrap(),
670 version: None,
671 diagnostics: vec![lsp::Diagnostic {
672 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
673 severity: Some(lsp::DiagnosticSeverity::WARNING),
674 message: "error 2".to_string(),
675 ..Default::default()
676 }],
677 },
678 &[],
679 cx,
680 )
681 .unwrap();
682 });
683
684 buffer_a.read_with(cx, |buffer, _| {
685 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
686 assert_eq!(
687 chunks
688 .iter()
689 .map(|(s, d)| (s.as_str(), *d))
690 .collect::<Vec<_>>(),
691 &[
692 ("let ", None),
693 ("a", Some(DiagnosticSeverity::ERROR)),
694 (" = 1;", None),
695 ]
696 );
697 });
698 buffer_b.read_with(cx, |buffer, _| {
699 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
700 assert_eq!(
701 chunks
702 .iter()
703 .map(|(s, d)| (s.as_str(), *d))
704 .collect::<Vec<_>>(),
705 &[
706 ("let ", None),
707 ("b", Some(DiagnosticSeverity::WARNING)),
708 (" = 2;", None),
709 ]
710 );
711 });
712}
713
714#[gpui::test]
715async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
716 init_test(cx);
717
718 let fs = FakeFs::new(cx.background());
719 fs.insert_tree(
720 "/root",
721 json!({
722 "dir": {
723 "a.rs": "let a = 1;",
724 },
725 "other.rs": "let b = c;"
726 }),
727 )
728 .await;
729
730 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
731
732 let (worktree, _) = project
733 .update(cx, |project, cx| {
734 project.find_or_create_local_worktree("/root/other.rs", false, cx)
735 })
736 .await
737 .unwrap();
738 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
739
740 project.update(cx, |project, cx| {
741 project
742 .update_diagnostics(
743 LanguageServerId(0),
744 lsp::PublishDiagnosticsParams {
745 uri: Url::from_file_path("/root/other.rs").unwrap(),
746 version: None,
747 diagnostics: vec![lsp::Diagnostic {
748 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
749 severity: Some(lsp::DiagnosticSeverity::ERROR),
750 message: "unknown variable 'c'".to_string(),
751 ..Default::default()
752 }],
753 },
754 &[],
755 cx,
756 )
757 .unwrap();
758 });
759
760 let buffer = project
761 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
762 .await
763 .unwrap();
764 buffer.read_with(cx, |buffer, _| {
765 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
766 assert_eq!(
767 chunks
768 .iter()
769 .map(|(s, d)| (s.as_str(), *d))
770 .collect::<Vec<_>>(),
771 &[
772 ("let b = ", None),
773 ("c", Some(DiagnosticSeverity::ERROR)),
774 (";", None),
775 ]
776 );
777 });
778
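    // Diagnostics in the invisible worktree should not contribute to the project's
    // diagnostic summaries.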
779 project.read_with(cx, |project, cx| {
780 assert_eq!(project.diagnostic_summaries(cx).next(), None);
781 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
782 });
783}
784
785#[gpui::test]
786async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
787 init_test(cx);
788
789 let progress_token = "the-progress-token";
790 let mut language = Language::new(
791 LanguageConfig {
792 name: "Rust".into(),
793 path_suffixes: vec!["rs".to_string()],
794 ..Default::default()
795 },
796 Some(tree_sitter_rust::language()),
797 );
798 let mut fake_servers = language
799 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
800 disk_based_diagnostics_progress_token: Some(progress_token.into()),
801 disk_based_diagnostics_sources: vec!["disk".into()],
802 ..Default::default()
803 }))
804 .await;
805
806 let fs = FakeFs::new(cx.background());
807 fs.insert_tree(
808 "/dir",
809 json!({
810 "a.rs": "fn a() { A }",
811 "b.rs": "const y: i32 = 1",
812 }),
813 )
814 .await;
815
816 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
817 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
818 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
819
    // Cause the worktree to start the fake language server
821 let _buffer = project
822 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
823 .await
824 .unwrap();
825
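    // Subscribe to project events so we can observe the disk-based diagnostics lifecycle.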
826 let mut events = subscribe(&project, cx);
827
828 let fake_server = fake_servers.next().await.unwrap();
829 fake_server
830 .start_progress(format!("{}/0", progress_token))
831 .await;
832 assert_eq!(
833 events.next().await.unwrap(),
834 Event::DiskBasedDiagnosticsStarted {
835 language_server_id: LanguageServerId(0),
836 }
837 );
838
839 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
840 uri: Url::from_file_path("/dir/a.rs").unwrap(),
841 version: None,
842 diagnostics: vec![lsp::Diagnostic {
843 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
844 severity: Some(lsp::DiagnosticSeverity::ERROR),
845 message: "undefined variable 'A'".to_string(),
846 ..Default::default()
847 }],
848 });
849 assert_eq!(
850 events.next().await.unwrap(),
851 Event::DiagnosticsUpdated {
852 language_server_id: LanguageServerId(0),
853 path: (worktree_id, Path::new("a.rs")).into()
854 }
855 );
856
857 fake_server.end_progress(format!("{}/0", progress_token));
858 assert_eq!(
859 events.next().await.unwrap(),
860 Event::DiskBasedDiagnosticsFinished {
861 language_server_id: LanguageServerId(0)
862 }
863 );
864
865 let buffer = project
866 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
867 .await
868 .unwrap();
869
870 buffer.read_with(cx, |buffer, _| {
871 let snapshot = buffer.snapshot();
872 let diagnostics = snapshot
873 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
874 .collect::<Vec<_>>();
875 assert_eq!(
876 diagnostics,
877 &[DiagnosticEntry {
878 range: Point::new(0, 9)..Point::new(0, 10),
879 diagnostic: Diagnostic {
880 severity: lsp::DiagnosticSeverity::ERROR,
881 message: "undefined variable 'A'".to_string(),
882 group_id: 0,
883 is_primary: true,
884 ..Default::default()
885 }
886 }]
887 )
888 });
889
890 // Ensure publishing empty diagnostics twice only results in one update event.
891 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
892 uri: Url::from_file_path("/dir/a.rs").unwrap(),
893 version: None,
894 diagnostics: Default::default(),
895 });
896 assert_eq!(
897 events.next().await.unwrap(),
898 Event::DiagnosticsUpdated {
899 language_server_id: LanguageServerId(0),
900 path: (worktree_id, Path::new("a.rs")).into()
901 }
902 );
903
904 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
905 uri: Url::from_file_path("/dir/a.rs").unwrap(),
906 version: None,
907 diagnostics: Default::default(),
908 });
909 cx.foreground().run_until_parked();
910 assert_eq!(futures::poll!(events.next()), Poll::Pending);
911}
912
913#[gpui::test]
914async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
915 init_test(cx);
916
917 let progress_token = "the-progress-token";
918 let mut language = Language::new(
919 LanguageConfig {
920 path_suffixes: vec!["rs".to_string()],
921 ..Default::default()
922 },
923 None,
924 );
925 let mut fake_servers = language
926 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
927 disk_based_diagnostics_sources: vec!["disk".into()],
928 disk_based_diagnostics_progress_token: Some(progress_token.into()),
929 ..Default::default()
930 }))
931 .await;
932
933 let fs = FakeFs::new(cx.background());
934 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
935
936 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
937 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
938
939 let buffer = project
940 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
941 .await
942 .unwrap();
943
944 // Simulate diagnostics starting to update.
945 let fake_server = fake_servers.next().await.unwrap();
946 fake_server.start_progress(progress_token).await;
947
948 // Restart the server before the diagnostics finish updating.
949 project.update(cx, |project, cx| {
950 project.restart_language_servers_for_buffers([buffer], cx);
951 });
952 let mut events = subscribe(&project, cx);
953
954 // Simulate the newly started server sending more diagnostics.
955 let fake_server = fake_servers.next().await.unwrap();
956 fake_server.start_progress(progress_token).await;
957 assert_eq!(
958 events.next().await.unwrap(),
959 Event::DiskBasedDiagnosticsStarted {
960 language_server_id: LanguageServerId(1)
961 }
962 );
963 project.read_with(cx, |project, _| {
964 assert_eq!(
965 project
966 .language_servers_running_disk_based_diagnostics()
967 .collect::<Vec<_>>(),
968 [LanguageServerId(1)]
969 );
970 });
971
972 // All diagnostics are considered done, despite the old server's diagnostic
973 // task never completing.
974 fake_server.end_progress(progress_token);
975 assert_eq!(
976 events.next().await.unwrap(),
977 Event::DiskBasedDiagnosticsFinished {
978 language_server_id: LanguageServerId(1)
979 }
980 );
981 project.read_with(cx, |project, _| {
982 assert_eq!(
983 project
984 .language_servers_running_disk_based_diagnostics()
985 .collect::<Vec<_>>(),
986 [LanguageServerId(0); 0]
987 );
988 });
989}
990
991#[gpui::test]
992async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
993 init_test(cx);
994
995 let mut language = Language::new(
996 LanguageConfig {
997 path_suffixes: vec!["rs".to_string()],
998 ..Default::default()
999 },
1000 None,
1001 );
1002 let mut fake_servers = language
1003 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1004 ..Default::default()
1005 }))
1006 .await;
1007
1008 let fs = FakeFs::new(cx.background());
1009 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1010
1011 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1012 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1013
1014 let buffer = project
1015 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1016 .await
1017 .unwrap();
1018
1019 // Publish diagnostics
1020 let fake_server = fake_servers.next().await.unwrap();
1021 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1022 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1023 version: None,
1024 diagnostics: vec![lsp::Diagnostic {
1025 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1026 severity: Some(lsp::DiagnosticSeverity::ERROR),
1027 message: "the message".to_string(),
1028 ..Default::default()
1029 }],
1030 });
1031
1032 cx.foreground().run_until_parked();
1033 buffer.read_with(cx, |buffer, _| {
1034 assert_eq!(
1035 buffer
1036 .snapshot()
1037 .diagnostics_in_range::<_, usize>(0..1, false)
1038 .map(|entry| entry.diagnostic.message.clone())
1039 .collect::<Vec<_>>(),
1040 ["the message".to_string()]
1041 );
1042 });
1043 project.read_with(cx, |project, cx| {
1044 assert_eq!(
1045 project.diagnostic_summary(cx),
1046 DiagnosticSummary {
1047 error_count: 1,
1048 warning_count: 0,
1049 }
1050 );
1051 });
1052
1053 project.update(cx, |project, cx| {
1054 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1055 });
1056
1057 // The diagnostics are cleared.
1058 cx.foreground().run_until_parked();
1059 buffer.read_with(cx, |buffer, _| {
1060 assert_eq!(
1061 buffer
1062 .snapshot()
1063 .diagnostics_in_range::<_, usize>(0..1, false)
1064 .map(|entry| entry.diagnostic.message.clone())
1065 .collect::<Vec<_>>(),
1066 Vec::<String>::new(),
1067 );
1068 });
1069 project.read_with(cx, |project, cx| {
1070 assert_eq!(
1071 project.diagnostic_summary(cx),
1072 DiagnosticSummary {
1073 error_count: 0,
1074 warning_count: 0,
1075 }
1076 );
1077 });
1078}
1079
1080#[gpui::test]
1081async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1082 init_test(cx);
1083
1084 let mut language = Language::new(
1085 LanguageConfig {
1086 path_suffixes: vec!["rs".to_string()],
1087 ..Default::default()
1088 },
1089 None,
1090 );
1091 let mut fake_servers = language
1092 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1093 name: "the-lsp",
1094 ..Default::default()
1095 }))
1096 .await;
1097
1098 let fs = FakeFs::new(cx.background());
1099 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1100
1101 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1102 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1103
1104 let buffer = project
1105 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1106 .await
1107 .unwrap();
1108
1109 // Before restarting the server, report diagnostics with an unknown buffer version.
1110 let fake_server = fake_servers.next().await.unwrap();
1111 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1112 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1113 version: Some(10000),
1114 diagnostics: Vec::new(),
1115 });
1116 cx.foreground().run_until_parked();
1117
1118 project.update(cx, |project, cx| {
1119 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1120 });
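    // The restarted server should receive the buffer with a fresh version of 0, rather
    // than the unknown version reported earlier.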
1121 let mut fake_server = fake_servers.next().await.unwrap();
1122 let notification = fake_server
1123 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1124 .await
1125 .text_document;
1126 assert_eq!(notification.version, 0);
1127}
1128
1129#[gpui::test]
1130async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1131 init_test(cx);
1132
1133 let mut rust = Language::new(
1134 LanguageConfig {
1135 name: Arc::from("Rust"),
1136 path_suffixes: vec!["rs".to_string()],
1137 ..Default::default()
1138 },
1139 None,
1140 );
1141 let mut fake_rust_servers = rust
1142 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1143 name: "rust-lsp",
1144 ..Default::default()
1145 }))
1146 .await;
1147 let mut js = Language::new(
1148 LanguageConfig {
1149 name: Arc::from("JavaScript"),
1150 path_suffixes: vec!["js".to_string()],
1151 ..Default::default()
1152 },
1153 None,
1154 );
1155 let mut fake_js_servers = js
1156 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1157 name: "js-lsp",
1158 ..Default::default()
1159 }))
1160 .await;
1161
1162 let fs = FakeFs::new(cx.background());
1163 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1164 .await;
1165
1166 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1167 project.update(cx, |project, _| {
1168 project.languages.add(Arc::new(rust));
1169 project.languages.add(Arc::new(js));
1170 });
1171
1172 let _rs_buffer = project
1173 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1174 .await
1175 .unwrap();
1176 let _js_buffer = project
1177 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1178 .await
1179 .unwrap();
1180
1181 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1182 assert_eq!(
1183 fake_rust_server_1
1184 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1185 .await
1186 .text_document
1187 .uri
1188 .as_str(),
1189 "file:///dir/a.rs"
1190 );
1191
1192 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1193 assert_eq!(
1194 fake_js_server
1195 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1196 .await
1197 .text_document
1198 .uri
1199 .as_str(),
1200 "file:///dir/b.js"
1201 );
1202
1203 // Disable Rust language server, ensuring only that server gets stopped.
1204 cx.update(|cx| {
1205 cx.update_global(|settings: &mut SettingsStore, cx| {
1206 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1207 settings.languages.insert(
1208 Arc::from("Rust"),
1209 LanguageSettingsContent {
1210 enable_language_server: Some(false),
1211 ..Default::default()
1212 },
1213 );
1214 });
1215 })
1216 });
1217 fake_rust_server_1
1218 .receive_notification::<lsp::notification::Exit>()
1219 .await;
1220
1221 // Enable Rust and disable JavaScript language servers, ensuring that the
1222 // former gets started again and that the latter stops.
1223 cx.update(|cx| {
1224 cx.update_global(|settings: &mut SettingsStore, cx| {
1225 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1226 settings.languages.insert(
1227 Arc::from("Rust"),
1228 LanguageSettingsContent {
1229 enable_language_server: Some(true),
1230 ..Default::default()
1231 },
1232 );
1233 settings.languages.insert(
1234 Arc::from("JavaScript"),
1235 LanguageSettingsContent {
1236 enable_language_server: Some(false),
1237 ..Default::default()
1238 },
1239 );
1240 });
1241 })
1242 });
1243 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1244 assert_eq!(
1245 fake_rust_server_2
1246 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1247 .await
1248 .text_document
1249 .uri
1250 .as_str(),
1251 "file:///dir/a.rs"
1252 );
1253 fake_js_server
1254 .receive_notification::<lsp::notification::Exit>()
1255 .await;
1256}
1257
1258#[gpui::test(iterations = 3)]
1259async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1260 init_test(cx);
1261
1262 let mut language = Language::new(
1263 LanguageConfig {
1264 name: "Rust".into(),
1265 path_suffixes: vec!["rs".to_string()],
1266 ..Default::default()
1267 },
1268 Some(tree_sitter_rust::language()),
1269 );
1270 let mut fake_servers = language
1271 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1272 disk_based_diagnostics_sources: vec!["disk".into()],
1273 ..Default::default()
1274 }))
1275 .await;
1276
1277 let text = "
1278 fn a() { A }
1279 fn b() { BB }
1280 fn c() { CCC }
1281 "
1282 .unindent();
1283
1284 let fs = FakeFs::new(cx.background());
1285 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1286
1287 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1288 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1289
1290 let buffer = project
1291 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1292 .await
1293 .unwrap();
1294
1295 let mut fake_server = fake_servers.next().await.unwrap();
1296 let open_notification = fake_server
1297 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1298 .await;
1299
1300 // Edit the buffer, moving the content down
1301 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1302 let change_notification_1 = fake_server
1303 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1304 .await;
1305 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1306
1307 // Report some diagnostics for the initial version of the buffer
1308 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1309 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1310 version: Some(open_notification.text_document.version),
1311 diagnostics: vec![
1312 lsp::Diagnostic {
1313 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1314 severity: Some(DiagnosticSeverity::ERROR),
1315 message: "undefined variable 'A'".to_string(),
1316 source: Some("disk".to_string()),
1317 ..Default::default()
1318 },
1319 lsp::Diagnostic {
1320 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1321 severity: Some(DiagnosticSeverity::ERROR),
1322 message: "undefined variable 'BB'".to_string(),
1323 source: Some("disk".to_string()),
1324 ..Default::default()
1325 },
1326 lsp::Diagnostic {
1327 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1328 severity: Some(DiagnosticSeverity::ERROR),
1329 source: Some("disk".to_string()),
1330 message: "undefined variable 'CCC'".to_string(),
1331 ..Default::default()
1332 },
1333 ],
1334 });
1335
1336 // The diagnostics have moved down since they were created.
1337 buffer.next_notification(cx).await;
1338 cx.foreground().run_until_parked();
1339 buffer.read_with(cx, |buffer, _| {
1340 assert_eq!(
1341 buffer
1342 .snapshot()
1343 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1344 .collect::<Vec<_>>(),
1345 &[
1346 DiagnosticEntry {
1347 range: Point::new(3, 9)..Point::new(3, 11),
1348 diagnostic: Diagnostic {
1349 source: Some("disk".into()),
1350 severity: DiagnosticSeverity::ERROR,
1351 message: "undefined variable 'BB'".to_string(),
1352 is_disk_based: true,
1353 group_id: 1,
1354 is_primary: true,
1355 ..Default::default()
1356 },
1357 },
1358 DiagnosticEntry {
1359 range: Point::new(4, 9)..Point::new(4, 12),
1360 diagnostic: Diagnostic {
1361 source: Some("disk".into()),
1362 severity: DiagnosticSeverity::ERROR,
1363 message: "undefined variable 'CCC'".to_string(),
1364 is_disk_based: true,
1365 group_id: 2,
1366 is_primary: true,
1367 ..Default::default()
1368 }
1369 }
1370 ]
1371 );
1372 assert_eq!(
1373 chunks_with_diagnostics(buffer, 0..buffer.len()),
1374 [
1375 ("\n\nfn a() { ".to_string(), None),
1376 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1377 (" }\nfn b() { ".to_string(), None),
1378 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1379 (" }\nfn c() { ".to_string(), None),
1380 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1381 (" }\n".to_string(), None),
1382 ]
1383 );
1384 assert_eq!(
1385 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1386 [
1387 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1388 (" }\nfn c() { ".to_string(), None),
1389 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1390 ]
1391 );
1392 });
1393
1394 // Ensure overlapping diagnostics are highlighted correctly.
1395 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1396 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1397 version: Some(open_notification.text_document.version),
1398 diagnostics: vec![
1399 lsp::Diagnostic {
1400 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1401 severity: Some(DiagnosticSeverity::ERROR),
1402 message: "undefined variable 'A'".to_string(),
1403 source: Some("disk".to_string()),
1404 ..Default::default()
1405 },
1406 lsp::Diagnostic {
1407 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1408 severity: Some(DiagnosticSeverity::WARNING),
1409 message: "unreachable statement".to_string(),
1410 source: Some("disk".to_string()),
1411 ..Default::default()
1412 },
1413 ],
1414 });
1415
1416 buffer.next_notification(cx).await;
1417 cx.foreground().run_until_parked();
1418 buffer.read_with(cx, |buffer, _| {
1419 assert_eq!(
1420 buffer
1421 .snapshot()
1422 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1423 .collect::<Vec<_>>(),
1424 &[
1425 DiagnosticEntry {
1426 range: Point::new(2, 9)..Point::new(2, 12),
1427 diagnostic: Diagnostic {
1428 source: Some("disk".into()),
1429 severity: DiagnosticSeverity::WARNING,
1430 message: "unreachable statement".to_string(),
1431 is_disk_based: true,
1432 group_id: 4,
1433 is_primary: true,
1434 ..Default::default()
1435 }
1436 },
1437 DiagnosticEntry {
1438 range: Point::new(2, 9)..Point::new(2, 10),
1439 diagnostic: Diagnostic {
1440 source: Some("disk".into()),
1441 severity: DiagnosticSeverity::ERROR,
1442 message: "undefined variable 'A'".to_string(),
1443 is_disk_based: true,
1444 group_id: 3,
1445 is_primary: true,
1446 ..Default::default()
1447 },
1448 }
1449 ]
1450 );
1451 assert_eq!(
1452 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1453 [
1454 ("fn a() { ".to_string(), None),
1455 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1456 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1457 ("\n".to_string(), None),
1458 ]
1459 );
1460 assert_eq!(
1461 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1462 [
1463 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1464 ("\n".to_string(), None),
1465 ]
1466 );
1467 });
1468
1469 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1470 // changes since the last save.
1471 buffer.update(cx, |buffer, cx| {
1472 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1473 buffer.edit(
1474 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1475 None,
1476 cx,
1477 );
1478 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1479 });
1480 let change_notification_2 = fake_server
1481 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1482 .await;
1483 assert!(
1484 change_notification_2.text_document.version > change_notification_1.text_document.version
1485 );
1486
1487 // Handle out-of-order diagnostics
1488 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1489 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1490 version: Some(change_notification_2.text_document.version),
1491 diagnostics: vec![
1492 lsp::Diagnostic {
1493 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1494 severity: Some(DiagnosticSeverity::ERROR),
1495 message: "undefined variable 'BB'".to_string(),
1496 source: Some("disk".to_string()),
1497 ..Default::default()
1498 },
1499 lsp::Diagnostic {
1500 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1501 severity: Some(DiagnosticSeverity::WARNING),
1502 message: "undefined variable 'A'".to_string(),
1503 source: Some("disk".to_string()),
1504 ..Default::default()
1505 },
1506 ],
1507 });
1508
1509 buffer.next_notification(cx).await;
1510 cx.foreground().run_until_parked();
1511 buffer.read_with(cx, |buffer, _| {
1512 assert_eq!(
1513 buffer
1514 .snapshot()
1515 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1516 .collect::<Vec<_>>(),
1517 &[
1518 DiagnosticEntry {
1519 range: Point::new(2, 21)..Point::new(2, 22),
1520 diagnostic: Diagnostic {
1521 source: Some("disk".into()),
1522 severity: DiagnosticSeverity::WARNING,
1523 message: "undefined variable 'A'".to_string(),
1524 is_disk_based: true,
1525 group_id: 6,
1526 is_primary: true,
1527 ..Default::default()
1528 }
1529 },
1530 DiagnosticEntry {
1531 range: Point::new(3, 9)..Point::new(3, 14),
1532 diagnostic: Diagnostic {
1533 source: Some("disk".into()),
1534 severity: DiagnosticSeverity::ERROR,
1535 message: "undefined variable 'BB'".to_string(),
1536 is_disk_based: true,
1537 group_id: 5,
1538 is_primary: true,
1539 ..Default::default()
1540 },
1541 }
1542 ]
1543 );
1544 });
1545}
1546
1547#[gpui::test]
1548async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1549 init_test(cx);
1550
1551 let text = concat!(
1552 "let one = ;\n", //
1553 "let two = \n",
1554 "let three = 3;\n",
1555 );
1556
1557 let fs = FakeFs::new(cx.background());
1558 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1559
1560 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1561 let buffer = project
1562 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1563 .await
1564 .unwrap();
1565
1566 project.update(cx, |project, cx| {
1567 project
1568 .update_buffer_diagnostics(
1569 &buffer,
1570 LanguageServerId(0),
1571 None,
1572 vec![
1573 DiagnosticEntry {
1574 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1575 diagnostic: Diagnostic {
1576 severity: DiagnosticSeverity::ERROR,
1577 message: "syntax error 1".to_string(),
1578 ..Default::default()
1579 },
1580 },
1581 DiagnosticEntry {
1582 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1583 diagnostic: Diagnostic {
1584 severity: DiagnosticSeverity::ERROR,
1585 message: "syntax error 2".to_string(),
1586 ..Default::default()
1587 },
1588 },
1589 ],
1590 cx,
1591 )
1592 .unwrap();
1593 });
1594
1595 // An empty range is extended forward to include the following character.
1596 // At the end of a line, an empty range is extended backward to include
1597 // the preceding character.
1598 buffer.read_with(cx, |buffer, _| {
1599 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1600 assert_eq!(
1601 chunks
1602 .iter()
1603 .map(|(s, d)| (s.as_str(), *d))
1604 .collect::<Vec<_>>(),
1605 &[
1606 ("let one = ", None),
1607 (";", Some(DiagnosticSeverity::ERROR)),
1608 ("\nlet two =", None),
1609 (" ", Some(DiagnosticSeverity::ERROR)),
1610 ("\nlet three = 3;\n", None)
1611 ]
1612 );
1613 });
1614}
1615
1616#[gpui::test]
1617async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1618 init_test(cx);
1619
1620 let fs = FakeFs::new(cx.background());
1621 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1622 .await;
1623
1624 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1625
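    // Report a diagnostic for the same range from two different language servers. Both
    // should be counted in the project's diagnostic summary.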
1626 project.update(cx, |project, cx| {
1627 project
1628 .update_diagnostic_entries(
1629 LanguageServerId(0),
1630 Path::new("/dir/a.rs").to_owned(),
1631 None,
1632 vec![DiagnosticEntry {
1633 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1634 diagnostic: Diagnostic {
1635 severity: DiagnosticSeverity::ERROR,
1636 is_primary: true,
1637 message: "syntax error a1".to_string(),
1638 ..Default::default()
1639 },
1640 }],
1641 cx,
1642 )
1643 .unwrap();
1644 project
1645 .update_diagnostic_entries(
1646 LanguageServerId(1),
1647 Path::new("/dir/a.rs").to_owned(),
1648 None,
1649 vec![DiagnosticEntry {
1650 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1651 diagnostic: Diagnostic {
1652 severity: DiagnosticSeverity::ERROR,
1653 is_primary: true,
1654 message: "syntax error b1".to_string(),
1655 ..Default::default()
1656 },
1657 }],
1658 cx,
1659 )
1660 .unwrap();
1661
1662 assert_eq!(
1663 project.diagnostic_summary(cx),
1664 DiagnosticSummary {
1665 error_count: 2,
1666 warning_count: 0,
1667 }
1668 );
1669 });
1670}
1671
1672#[gpui::test]
1673async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1674 init_test(cx);
1675
1676 let mut language = Language::new(
1677 LanguageConfig {
1678 name: "Rust".into(),
1679 path_suffixes: vec!["rs".to_string()],
1680 ..Default::default()
1681 },
1682 Some(tree_sitter_rust::language()),
1683 );
1684 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1685
1686 let text = "
1687 fn a() {
1688 f1();
1689 }
1690 fn b() {
1691 f2();
1692 }
1693 fn c() {
1694 f3();
1695 }
1696 "
1697 .unindent();
1698
1699 let fs = FakeFs::new(cx.background());
1700 fs.insert_tree(
1701 "/dir",
1702 json!({
1703 "a.rs": text.clone(),
1704 }),
1705 )
1706 .await;
1707
1708 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1709 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1710 let buffer = project
1711 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1712 .await
1713 .unwrap();
1714
1715 let mut fake_server = fake_servers.next().await.unwrap();
1716 let lsp_document_version = fake_server
1717 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1718 .await
1719 .text_document
1720 .version;
1721
1722 // Simulate editing the buffer after the language server computes some edits.
1723 buffer.update(cx, |buffer, cx| {
1724 buffer.edit(
1725 [(
1726 Point::new(0, 0)..Point::new(0, 0),
1727 "// above first function\n",
1728 )],
1729 None,
1730 cx,
1731 );
1732 buffer.edit(
1733 [(
1734 Point::new(2, 0)..Point::new(2, 0),
1735 " // inside first function\n",
1736 )],
1737 None,
1738 cx,
1739 );
1740 buffer.edit(
1741 [(
1742 Point::new(6, 4)..Point::new(6, 4),
1743 "// inside second function ",
1744 )],
1745 None,
1746 cx,
1747 );
1748
1749 assert_eq!(
1750 buffer.text(),
1751 "
1752 // above first function
1753 fn a() {
1754 // inside first function
1755 f1();
1756 }
1757 fn b() {
1758 // inside second function f2();
1759 }
1760 fn c() {
1761 f3();
1762 }
1763 "
1764 .unindent()
1765 );
1766 });
1767
1768 let edits = project
1769 .update(cx, |project, cx| {
1770 project.edits_from_lsp(
1771 &buffer,
1772 vec![
1773 // replace body of first function
1774 lsp::TextEdit {
1775 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1776 new_text: "
1777 fn a() {
1778 f10();
1779 }
1780 "
1781 .unindent(),
1782 },
1783 // edit inside second function
1784 lsp::TextEdit {
1785 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1786 new_text: "00".into(),
1787 },
1788 // edit inside third function via two distinct edits
1789 lsp::TextEdit {
1790 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1791 new_text: "4000".into(),
1792 },
1793 lsp::TextEdit {
1794 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1795 new_text: "".into(),
1796 },
1797 ],
1798 LanguageServerId(0),
1799 Some(lsp_document_version),
1800 cx,
1801 )
1802 })
1803 .await
1804 .unwrap();
1805
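    // The returned edits are expressed in terms of the buffer's current contents, so
    // applying them directly yields the expected result even though the server computed
    // them against an older version of the document.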
1806 buffer.update(cx, |buffer, cx| {
1807 for (range, new_text) in edits {
1808 buffer.edit([(range, new_text)], None, cx);
1809 }
1810 assert_eq!(
1811 buffer.text(),
1812 "
1813 // above first function
1814 fn a() {
1815 // inside first function
1816 f10();
1817 }
1818 fn b() {
1819 // inside second function f200();
1820 }
1821 fn c() {
1822 f4000();
1823 }
1824 "
1825 .unindent()
1826 );
1827 });
1828}
1829
1830#[gpui::test]
1831async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1832 init_test(cx);
1833
1834 let text = "
1835 use a::b;
1836 use a::c;
1837
1838 fn f() {
1839 b();
1840 c();
1841 }
1842 "
1843 .unindent();
1844
1845 let fs = FakeFs::new(cx.background());
1846 fs.insert_tree(
1847 "/dir",
1848 json!({
1849 "a.rs": text.clone(),
1850 }),
1851 )
1852 .await;
1853
1854 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1855 let buffer = project
1856 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1857 .await
1858 .unwrap();
1859
1860 // Simulate the language server sending us a small edit in the form of a very large diff.
1861 // Rust-analyzer does this when performing a merge-imports code action.
1862 let edits = project
1863 .update(cx, |project, cx| {
1864 project.edits_from_lsp(
1865 &buffer,
1866 [
1867 // Replace the first use statement without editing the semicolon.
1868 lsp::TextEdit {
1869 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1870 new_text: "a::{b, c}".into(),
1871 },
1872 // Reinsert the remainder of the file between the semicolon and the final
1873 // newline of the file.
1874 lsp::TextEdit {
1875 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1876 new_text: "\n\n".into(),
1877 },
1878 lsp::TextEdit {
1879 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1880 new_text: "
1881 fn f() {
1882 b();
1883 c();
1884 }"
1885 .unindent(),
1886 },
1887 // Delete everything after the first newline of the file.
1888 lsp::TextEdit {
1889 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1890 new_text: "".into(),
1891 },
1892 ],
1893 LanguageServerId(0),
1894 None,
1895 cx,
1896 )
1897 })
1898 .await
1899 .unwrap();
1900
1901 buffer.update(cx, |buffer, cx| {
1902 let edits = edits
1903 .into_iter()
1904 .map(|(range, text)| {
1905 (
1906 range.start.to_point(buffer)..range.end.to_point(buffer),
1907 text,
1908 )
1909 })
1910 .collect::<Vec<_>>();
1911
1912 assert_eq!(
1913 edits,
1914 [
1915 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1916 (Point::new(1, 0)..Point::new(2, 0), "".into())
1917 ]
1918 );
1919
1920 for (range, new_text) in edits {
1921 buffer.edit([(range, new_text)], None, cx);
1922 }
1923 assert_eq!(
1924 buffer.text(),
1925 "
1926 use a::{b, c};
1927
1928 fn f() {
1929 b();
1930 c();
1931 }
1932 "
1933 .unindent()
1934 );
1935 });
1936}
1937
1938#[gpui::test]
1939async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1940 init_test(cx);
1941
1942 let text = "
1943 use a::b;
1944 use a::c;
1945
1946 fn f() {
1947 b();
1948 c();
1949 }
1950 "
1951 .unindent();
1952
1953 let fs = FakeFs::new(cx.background());
1954 fs.insert_tree(
1955 "/dir",
1956 json!({
1957 "a.rs": text.clone(),
1958 }),
1959 )
1960 .await;
1961
1962 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1963 let buffer = project
1964 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1965 .await
1966 .unwrap();
1967
1968 // Simulate the language server sending us edits in a non-ordered fashion,
1969 // with ranges sometimes being inverted or pointing to invalid locations.
1970 let edits = project
1971 .update(cx, |project, cx| {
1972 project.edits_from_lsp(
1973 &buffer,
1974 [
1975 lsp::TextEdit {
1976 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1977 new_text: "\n\n".into(),
1978 },
1979 lsp::TextEdit {
1980 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1981 new_text: "a::{b, c}".into(),
1982 },
1983 lsp::TextEdit {
1984 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1985 new_text: "".into(),
1986 },
1987 lsp::TextEdit {
1988 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1989 new_text: "
1990 fn f() {
1991 b();
1992 c();
1993 }"
1994 .unindent(),
1995 },
1996 ],
1997 LanguageServerId(0),
1998 None,
1999 cx,
2000 )
2001 })
2002 .await
2003 .unwrap();
2004
2005 buffer.update(cx, |buffer, cx| {
2006 let edits = edits
2007 .into_iter()
2008 .map(|(range, text)| {
2009 (
2010 range.start.to_point(buffer)..range.end.to_point(buffer),
2011 text,
2012 )
2013 })
2014 .collect::<Vec<_>>();
2015
2016 assert_eq!(
2017 edits,
2018 [
2019 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2020 (Point::new(1, 0)..Point::new(2, 0), "".into())
2021 ]
2022 );
2023
2024 for (range, new_text) in edits {
2025 buffer.edit([(range, new_text)], None, cx);
2026 }
2027 assert_eq!(
2028 buffer.text(),
2029 "
2030 use a::{b, c};
2031
2032 fn f() {
2033 b();
2034 c();
2035 }
2036 "
2037 .unindent()
2038 );
2039 });
2040}
2041
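// Collects the buffer's chunks over `range`, coalescing adjacent chunks that carry the
// same diagnostic severity so tests can assert on (text, severity) runs.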
2042fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2043 buffer: &Buffer,
2044 range: Range<T>,
2045) -> Vec<(String, Option<DiagnosticSeverity>)> {
2046 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2047 for chunk in buffer.snapshot().chunks(range, true) {
2048 if chunks.last().map_or(false, |prev_chunk| {
2049 prev_chunk.1 == chunk.diagnostic_severity
2050 }) {
2051 chunks.last_mut().unwrap().0.push_str(chunk.text);
2052 } else {
2053 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2054 }
2055 }
2056 chunks
2057}
2058
2059#[gpui::test(iterations = 10)]
2060async fn test_definition(cx: &mut gpui::TestAppContext) {
2061 init_test(cx);
2062
2063 let mut language = Language::new(
2064 LanguageConfig {
2065 name: "Rust".into(),
2066 path_suffixes: vec!["rs".to_string()],
2067 ..Default::default()
2068 },
2069 Some(tree_sitter_rust::language()),
2070 );
2071 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2072
2073 let fs = FakeFs::new(cx.background());
2074 fs.insert_tree(
2075 "/dir",
2076 json!({
2077 "a.rs": "const fn a() { A }",
2078 "b.rs": "const y: i32 = crate::a()",
2079 }),
2080 )
2081 .await;
2082
2083 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2084 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2085
2086 let buffer = project
2087 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2088 .await
2089 .unwrap();
2090
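    // Stub the server's definition response: the symbol at `crate::a()` resolves to a
    // location in `a.rs`, which lies outside the single-file worktree that was opened.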
2091 let fake_server = fake_servers.next().await.unwrap();
2092 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2093 let params = params.text_document_position_params;
2094 assert_eq!(
2095 params.text_document.uri.to_file_path().unwrap(),
2096 Path::new("/dir/b.rs"),
2097 );
2098 assert_eq!(params.position, lsp::Position::new(0, 22));
2099
2100 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2101 lsp::Location::new(
2102 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2103 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2104 ),
2105 )))
2106 });
2107
2108 let mut definitions = project
2109 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2110 .await
2111 .unwrap();
2112
2113 // Assert no new language server started
2114 cx.foreground().run_until_parked();
2115 assert!(fake_servers.try_next().is_err());
2116
2117 assert_eq!(definitions.len(), 1);
2118 let definition = definitions.pop().unwrap();
2119 cx.update(|cx| {
2120 let target_buffer = definition.target.buffer.read(cx);
2121 assert_eq!(
2122 target_buffer
2123 .file()
2124 .unwrap()
2125 .as_local()
2126 .unwrap()
2127 .abs_path(cx),
2128 Path::new("/dir/a.rs"),
2129 );
2130 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
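        // Opening the definition target added a second, non-visible worktree for `a.rs`;
        // it should go away once the definition (and thus its buffer) is dropped.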
2131 assert_eq!(
2132 list_worktrees(&project, cx),
2133 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2134 );
2135
2136 drop(definition);
2137 });
2138 cx.read(|cx| {
2139 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2140 });
2141
2142 fn list_worktrees<'a>(
2143 project: &'a ModelHandle<Project>,
2144 cx: &'a AppContext,
2145 ) -> Vec<(&'a Path, bool)> {
2146 project
2147 .read(cx)
2148 .worktrees(cx)
2149 .map(|worktree| {
2150 let worktree = worktree.read(cx);
2151 (
2152 worktree.as_local().unwrap().abs_path().as_ref(),
2153 worktree.is_visible(),
2154 )
2155 })
2156 .collect::<Vec<_>>()
2157 }
2158}
2159
2160#[gpui::test]
2161async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2162 init_test(cx);
2163
2164 let mut language = Language::new(
2165 LanguageConfig {
2166 name: "TypeScript".into(),
2167 path_suffixes: vec!["ts".to_string()],
2168 ..Default::default()
2169 },
2170 Some(tree_sitter_typescript::language_typescript()),
2171 );
2172 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2173
2174 let fs = FakeFs::new(cx.background());
2175 fs.insert_tree(
2176 "/dir",
2177 json!({
2178 "a.ts": "",
2179 }),
2180 )
2181 .await;
2182
2183 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2184 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2185 let buffer = project
2186 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2187 .await
2188 .unwrap();
2189
2190 let fake_server = fake_language_servers.next().await.unwrap();
2191
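    // Request completions for an identifier. The server's completion item carries no edit
    // range, so the word preceding the cursor (`fqn`) becomes the replaced range.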
2192 let text = "let a = b.fqn";
2193 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2194 let completions = project.update(cx, |project, cx| {
2195 project.completions(&buffer, text.len(), cx)
2196 });
2197
2198 fake_server
2199 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2200 Ok(Some(lsp::CompletionResponse::Array(vec![
2201 lsp::CompletionItem {
2202 label: "fullyQualifiedName?".into(),
2203 insert_text: Some("fullyQualifiedName".into()),
2204 ..Default::default()
2205 },
2206 ])))
2207 })
2208 .next()
2209 .await;
2210 let completions = completions.await.unwrap();
2211 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2212 assert_eq!(completions.len(), 1);
2213 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2214 assert_eq!(
2215 completions[0].old_range.to_offset(&snapshot),
2216 text.len() - 3..text.len()
2217 );
2218
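    // Request completions inside a string literal. Again there is no edit range, so only
    // the partial word before the cursor (`cmp`) is replaced, leaving the closing quote
    // intact.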
2219 let text = "let a = \"atoms/cmp\"";
2220 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2221 let completions = project.update(cx, |project, cx| {
2222 project.completions(&buffer, text.len() - 1, cx)
2223 });
2224
2225 fake_server
2226 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2227 Ok(Some(lsp::CompletionResponse::Array(vec![
2228 lsp::CompletionItem {
2229 label: "component".into(),
2230 ..Default::default()
2231 },
2232 ])))
2233 })
2234 .next()
2235 .await;
2236 let completions = completions.await.unwrap();
2237 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2238 assert_eq!(completions.len(), 1);
2239 assert_eq!(completions[0].new_text, "component");
2240 assert_eq!(
2241 completions[0].old_range.to_offset(&snapshot),
2242 text.len() - 4..text.len() - 1
2243 );
2244}
2245
2246#[gpui::test]
2247async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2248 init_test(cx);
2249
2250 let mut language = Language::new(
2251 LanguageConfig {
2252 name: "TypeScript".into(),
2253 path_suffixes: vec!["ts".to_string()],
2254 ..Default::default()
2255 },
2256 Some(tree_sitter_typescript::language_typescript()),
2257 );
2258 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2259
2260 let fs = FakeFs::new(cx.background());
2261 fs.insert_tree(
2262 "/dir",
2263 json!({
2264 "a.ts": "",
2265 }),
2266 )
2267 .await;
2268
2269 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2270 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2271 let buffer = project
2272 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2273 .await
2274 .unwrap();
2275
2276 let fake_server = fake_language_servers.next().await.unwrap();
2277
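    // The server's insert text contains `\r` and `\r\n` line endings; when the completion
    // is applied, they should be normalized to `\n`.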
2278 let text = "let a = b.fqn";
2279 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2280 let completions = project.update(cx, |project, cx| {
2281 project.completions(&buffer, text.len(), cx)
2282 });
2283
2284 fake_server
2285 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2286 Ok(Some(lsp::CompletionResponse::Array(vec![
2287 lsp::CompletionItem {
2288 label: "fullyQualifiedName?".into(),
2289 insert_text: Some("fully\rQualified\r\nName".into()),
2290 ..Default::default()
2291 },
2292 ])))
2293 })
2294 .next()
2295 .await;
2296 let completions = completions.await.unwrap();
2297 assert_eq!(completions.len(), 1);
2298 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2299}
2300
2301#[gpui::test(iterations = 10)]
2302async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2303 init_test(cx);
2304
2305 let mut language = Language::new(
2306 LanguageConfig {
2307 name: "TypeScript".into(),
2308 path_suffixes: vec!["ts".to_string()],
2309 ..Default::default()
2310 },
2311 None,
2312 );
2313 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2314
2315 let fs = FakeFs::new(cx.background());
2316 fs.insert_tree(
2317 "/dir",
2318 json!({
2319 "a.ts": "a",
2320 }),
2321 )
2322 .await;
2323
2324 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2325 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2326 let buffer = project
2327 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2328 .await
2329 .unwrap();
2330
2331 let fake_server = fake_language_servers.next().await.unwrap();
2332
2333    // The language server returns code actions that contain commands, not edits.
2334 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2335 fake_server
2336 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2337 Ok(Some(vec![
2338 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2339 title: "The code action".into(),
2340 command: Some(lsp::Command {
2341 title: "The command".into(),
2342 command: "_the/command".into(),
2343 arguments: Some(vec![json!("the-argument")]),
2344 }),
2345 ..Default::default()
2346 }),
2347 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2348 title: "two".into(),
2349 ..Default::default()
2350 }),
2351 ]))
2352 })
2353 .next()
2354 .await;
2355
2356 let action = actions.await.unwrap()[0].clone();
2357 let apply = project.update(cx, |project, cx| {
2358 project.apply_code_action(buffer.clone(), action, true, cx)
2359 });
2360
2361    // Resolving the code action does not populate its edits. In the absence of
2362    // edits, we must execute the given command.
2363 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2364 |action, _| async move { Ok(action) },
2365 );
2366
2367    // While executing the command, the language server sends the editor
2368    // a `workspace/applyEdit` request.
2369 fake_server
2370 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2371 let fake = fake_server.clone();
2372 move |params, _| {
2373 assert_eq!(params.command, "_the/command");
2374 let fake = fake.clone();
2375 async move {
2376 fake.server
2377 .request::<lsp::request::ApplyWorkspaceEdit>(
2378 lsp::ApplyWorkspaceEditParams {
2379 label: None,
2380 edit: lsp::WorkspaceEdit {
2381 changes: Some(
2382 [(
2383 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2384 vec![lsp::TextEdit {
2385 range: lsp::Range::new(
2386 lsp::Position::new(0, 0),
2387 lsp::Position::new(0, 0),
2388 ),
2389 new_text: "X".into(),
2390 }],
2391 )]
2392 .into_iter()
2393 .collect(),
2394 ),
2395 ..Default::default()
2396 },
2397 },
2398 )
2399 .await
2400 .unwrap();
2401 Ok(Some(json!(null)))
2402 }
2403 }
2404 })
2405 .next()
2406 .await;
2407
2408    // Applying the code action returns a project transaction containing the edits
2409    // sent by the language server in its `workspace/applyEdit` request.
2410 let transaction = apply.await.unwrap();
2411 assert!(transaction.0.contains_key(&buffer));
2412 buffer.update(cx, |buffer, cx| {
2413 assert_eq!(buffer.text(), "Xa");
2414 buffer.undo(cx);
2415 assert_eq!(buffer.text(), "a");
2416 });
2417}
2418
2419#[gpui::test(iterations = 10)]
2420async fn test_save_file(cx: &mut gpui::TestAppContext) {
2421 init_test(cx);
2422
2423 let fs = FakeFs::new(cx.background());
2424 fs.insert_tree(
2425 "/dir",
2426 json!({
2427 "file1": "the old contents",
2428 }),
2429 )
2430 .await;
2431
2432 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2433 let buffer = project
2434 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2435 .await
2436 .unwrap();
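    // Insert roughly 160 KB of text so that saving writes a non-trivial amount of data.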
2437 buffer.update(cx, |buffer, cx| {
2438 assert_eq!(buffer.text(), "the old contents");
2439 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2440 });
2441
2442 project
2443 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2444 .await
2445 .unwrap();
2446
2447 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2448 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2449}
2450
2451#[gpui::test]
2452async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2453 init_test(cx);
2454
2455 let fs = FakeFs::new(cx.background());
2456 fs.insert_tree(
2457 "/dir",
2458 json!({
2459 "file1": "the old contents",
2460 }),
2461 )
2462 .await;
2463
2464 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2465 let buffer = project
2466 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2467 .await
2468 .unwrap();
2469 buffer.update(cx, |buffer, cx| {
2470 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2471 });
2472
2473 project
2474 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2475 .await
2476 .unwrap();
2477
2478 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2479 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2480}
2481
2482#[gpui::test]
2483async fn test_save_as(cx: &mut gpui::TestAppContext) {
2484 init_test(cx);
2485
2486 let fs = FakeFs::new(cx.background());
2487 fs.insert_tree("/dir", json!({})).await;
2488
2489 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2490
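    // Register a Rust language keyed on the `.rs` suffix, so that saving the untitled
    // buffer under a `.rs` path re-assigns its language from Plain Text to Rust.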
2491 let languages = project.read_with(cx, |project, _| project.languages().clone());
2492 languages.register(
2493 "/some/path",
2494 LanguageConfig {
2495 name: "Rust".into(),
2496 path_suffixes: vec!["rs".into()],
2497 ..Default::default()
2498 },
2499 tree_sitter_rust::language(),
2500 vec![],
2501 |_| Default::default(),
2502 );
2503
2504 let buffer = project.update(cx, |project, cx| {
2505 project.create_buffer("", None, cx).unwrap()
2506 });
2507 buffer.update(cx, |buffer, cx| {
2508 buffer.edit([(0..0, "abc")], None, cx);
2509 assert!(buffer.is_dirty());
2510 assert!(!buffer.has_conflict());
2511 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2512 });
2513 project
2514 .update(cx, |project, cx| {
2515 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2516 })
2517 .await
2518 .unwrap();
2519 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2520
2521 cx.foreground().run_until_parked();
2522 buffer.read_with(cx, |buffer, cx| {
2523 assert_eq!(
2524 buffer.file().unwrap().full_path(cx),
2525 Path::new("dir/file1.rs")
2526 );
2527 assert!(!buffer.is_dirty());
2528 assert!(!buffer.has_conflict());
2529 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2530 });
2531
2532 let opened_buffer = project
2533 .update(cx, |project, cx| {
2534 project.open_local_buffer("/dir/file1.rs", cx)
2535 })
2536 .await
2537 .unwrap();
2538 assert_eq!(opened_buffer, buffer);
2539}
2540
2541#[gpui::test(retries = 5)]
2542async fn test_rescan_and_remote_updates(
2543 deterministic: Arc<Deterministic>,
2544 cx: &mut gpui::TestAppContext,
2545) {
2546 init_test(cx);
2547 cx.foreground().allow_parking();
2548
2549 let dir = temp_tree(json!({
2550 "a": {
2551 "file1": "",
2552 "file2": "",
2553 "file3": "",
2554 },
2555 "b": {
2556 "c": {
2557 "file4": "",
2558 "file5": "",
2559 }
2560 }
2561 }));
2562
2563 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2564 let rpc = project.read_with(cx, |p, _| p.client.clone());
2565
2566 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2567 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2568 async move { buffer.await.unwrap() }
2569 };
2570 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2571 project.read_with(cx, |project, cx| {
2572 let tree = project.worktrees(cx).next().unwrap();
2573 tree.read(cx)
2574 .entry_for_path(path)
2575 .unwrap_or_else(|| panic!("no entry for path {}", path))
2576 .id
2577 })
2578 };
2579
2580 let buffer2 = buffer_for_path("a/file2", cx).await;
2581 let buffer3 = buffer_for_path("a/file3", cx).await;
2582 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2583 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2584
2585 let file2_id = id_for_path("a/file2", cx);
2586 let file3_id = id_for_path("a/file3", cx);
2587 let file4_id = id_for_path("b/c/file4", cx);
2588
2589 // Create a remote copy of this worktree.
2590 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2591
2592 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2593
2594 let updates = Arc::new(Mutex::new(Vec::new()));
2595 tree.update(cx, |tree, cx| {
2596 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2597 let updates = updates.clone();
2598 move |update| {
2599 updates.lock().push(update);
2600 async { true }
2601 }
2602 });
2603 });
2604
2605 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2606 deterministic.run_until_parked();
2607
2608 cx.read(|cx| {
2609 assert!(!buffer2.read(cx).is_dirty());
2610 assert!(!buffer3.read(cx).is_dirty());
2611 assert!(!buffer4.read(cx).is_dirty());
2612 assert!(!buffer5.read(cx).is_dirty());
2613 });
2614
2615 // Rename and delete files and directories.
2616 tree.flush_fs_events(cx).await;
2617 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2618 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2619 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2620 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2621 tree.flush_fs_events(cx).await;
2622
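    // After the renames: `a/file2` keeps its entry under its new name, `b/c` has become
    // `d` (carrying `file3` and `file4` with it), and `file5` no longer exists on disk.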
2623 let expected_paths = vec![
2624 "a",
2625 "a/file1",
2626 "a/file2.new",
2627 "b",
2628 "d",
2629 "d/file3",
2630 "d/file4",
2631 ];
2632
2633 cx.read(|app| {
2634 assert_eq!(
2635 tree.read(app)
2636 .paths()
2637 .map(|p| p.to_str().unwrap())
2638 .collect::<Vec<_>>(),
2639 expected_paths
2640 );
2641
2642 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2643 assert_eq!(id_for_path("d/file3", cx), file3_id);
2644 assert_eq!(id_for_path("d/file4", cx), file4_id);
2645
2646 assert_eq!(
2647 buffer2.read(app).file().unwrap().path().as_ref(),
2648 Path::new("a/file2.new")
2649 );
2650 assert_eq!(
2651 buffer3.read(app).file().unwrap().path().as_ref(),
2652 Path::new("d/file3")
2653 );
2654 assert_eq!(
2655 buffer4.read(app).file().unwrap().path().as_ref(),
2656 Path::new("d/file4")
2657 );
2658 assert_eq!(
2659 buffer5.read(app).file().unwrap().path().as_ref(),
2660 Path::new("b/c/file5")
2661 );
2662
2663 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2664 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2665 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2666 assert!(buffer5.read(app).file().unwrap().is_deleted());
2667 });
2668
2669 // Update the remote worktree. Check that it becomes consistent with the
2670 // local worktree.
2671 deterministic.run_until_parked();
2672 remote.update(cx, |remote, _| {
2673 for update in updates.lock().drain(..) {
2674 remote.as_remote_mut().unwrap().update_from_remote(update);
2675 }
2676 });
2677 deterministic.run_until_parked();
2678 remote.read_with(cx, |remote, _| {
2679 assert_eq!(
2680 remote
2681 .paths()
2682 .map(|p| p.to_str().unwrap())
2683 .collect::<Vec<_>>(),
2684 expected_paths
2685 );
2686 });
2687}
2688
2689#[gpui::test(iterations = 10)]
2690async fn test_buffer_identity_across_renames(
2691 deterministic: Arc<Deterministic>,
2692 cx: &mut gpui::TestAppContext,
2693) {
2694 init_test(cx);
2695
2696 let fs = FakeFs::new(cx.background());
2697 fs.insert_tree(
2698 "/dir",
2699 json!({
2700 "a": {
2701 "file1": "",
2702 }
2703 }),
2704 )
2705 .await;
2706
2707 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2708 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2709 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2710
2711 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2712 project.read_with(cx, |project, cx| {
2713 let tree = project.worktrees(cx).next().unwrap();
2714 tree.read(cx)
2715 .entry_for_path(path)
2716 .unwrap_or_else(|| panic!("no entry for path {}", path))
2717 .id
2718 })
2719 };
2720
2721 let dir_id = id_for_path("a", cx);
2722 let file_id = id_for_path("a/file1", cx);
2723 let buffer = project
2724 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2725 .await
2726 .unwrap();
2727 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2728
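    // Rename the parent directory. The entry ids for both the directory and the file
    // should be preserved, and the already-open buffer should remain clean.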
2729 project
2730 .update(cx, |project, cx| {
2731 project.rename_entry(dir_id, Path::new("b"), cx)
2732 })
2733 .unwrap()
2734 .await
2735 .unwrap();
2736 deterministic.run_until_parked();
2737 assert_eq!(id_for_path("b", cx), dir_id);
2738 assert_eq!(id_for_path("b/file1", cx), file_id);
2739 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2740}
2741
2742#[gpui::test]
2743async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2744 init_test(cx);
2745
2746 let fs = FakeFs::new(cx.background());
2747 fs.insert_tree(
2748 "/dir",
2749 json!({
2750 "a.txt": "a-contents",
2751 "b.txt": "b-contents",
2752 }),
2753 )
2754 .await;
2755
2756 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2757
2758 // Spawn multiple tasks to open paths, repeating some paths.
2759 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2760 (
2761 p.open_local_buffer("/dir/a.txt", cx),
2762 p.open_local_buffer("/dir/b.txt", cx),
2763 p.open_local_buffer("/dir/a.txt", cx),
2764 )
2765 });
2766
2767 let buffer_a_1 = buffer_a_1.await.unwrap();
2768 let buffer_a_2 = buffer_a_2.await.unwrap();
2769 let buffer_b = buffer_b.await.unwrap();
2770 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2771 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2772
2773 // There is only one buffer per path.
2774 let buffer_a_id = buffer_a_1.id();
2775 assert_eq!(buffer_a_2.id(), buffer_a_id);
2776
2777 // Open the same path again while it is still open.
2778 drop(buffer_a_1);
2779 let buffer_a_3 = project
2780 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2781 .await
2782 .unwrap();
2783
2784 // There's still only one buffer per path.
2785 assert_eq!(buffer_a_3.id(), buffer_a_id);
2786}
2787
2788#[gpui::test]
2789async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2790 init_test(cx);
2791
2792 let fs = FakeFs::new(cx.background());
2793 fs.insert_tree(
2794 "/dir",
2795 json!({
2796 "file1": "abc",
2797 "file2": "def",
2798 "file3": "ghi",
2799 }),
2800 )
2801 .await;
2802
2803 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2804
2805 let buffer1 = project
2806 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2807 .await
2808 .unwrap();
2809 let events = Rc::new(RefCell::new(Vec::new()));
2810
2811 // initially, the buffer isn't dirty.
2812 buffer1.update(cx, |buffer, cx| {
2813 cx.subscribe(&buffer1, {
2814 let events = events.clone();
2815 move |_, _, event, _| match event {
2816 BufferEvent::Operation(_) => {}
2817 _ => events.borrow_mut().push(event.clone()),
2818 }
2819 })
2820 .detach();
2821
2822 assert!(!buffer.is_dirty());
2823 assert!(events.borrow().is_empty());
2824
2825 buffer.edit([(1..2, "")], None, cx);
2826 });
2827
2828    // after the first edit, the buffer is dirty, and emits a `DirtyChanged` event.
2829 buffer1.update(cx, |buffer, cx| {
2830 assert!(buffer.text() == "ac");
2831 assert!(buffer.is_dirty());
2832 assert_eq!(
2833 *events.borrow(),
2834 &[language::Event::Edited, language::Event::DirtyChanged]
2835 );
2836 events.borrow_mut().clear();
2837 buffer.did_save(
2838 buffer.version(),
2839 buffer.as_rope().fingerprint(),
2840 buffer.file().unwrap().mtime(),
2841 cx,
2842 );
2843 });
2844
2845    // after saving, the buffer is not dirty, and emits a `Saved` event.
2846 buffer1.update(cx, |buffer, cx| {
2847 assert!(!buffer.is_dirty());
2848 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2849 events.borrow_mut().clear();
2850
2851 buffer.edit([(1..1, "B")], None, cx);
2852 buffer.edit([(2..2, "D")], None, cx);
2853 });
2854
2855    // after editing again, the buffer is dirty, and emits another `DirtyChanged` event.
2856 buffer1.update(cx, |buffer, cx| {
2857 assert!(buffer.text() == "aBDc");
2858 assert!(buffer.is_dirty());
2859 assert_eq!(
2860 *events.borrow(),
2861 &[
2862 language::Event::Edited,
2863 language::Event::DirtyChanged,
2864 language::Event::Edited,
2865 ],
2866 );
2867 events.borrow_mut().clear();
2868
2869 // After restoring the buffer to its previously-saved state,
2870 // the buffer is not considered dirty anymore.
2871 buffer.edit([(1..3, "")], None, cx);
2872 assert!(buffer.text() == "ac");
2873 assert!(!buffer.is_dirty());
2874 });
2875
2876 assert_eq!(
2877 *events.borrow(),
2878 &[language::Event::Edited, language::Event::DirtyChanged]
2879 );
2880
2881 // When a file is deleted, the buffer is considered dirty.
2882 let events = Rc::new(RefCell::new(Vec::new()));
2883 let buffer2 = project
2884 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2885 .await
2886 .unwrap();
2887 buffer2.update(cx, |_, cx| {
2888 cx.subscribe(&buffer2, {
2889 let events = events.clone();
2890 move |_, _, event, _| events.borrow_mut().push(event.clone())
2891 })
2892 .detach();
2893 });
2894
2895 fs.remove_file("/dir/file2".as_ref(), Default::default())
2896 .await
2897 .unwrap();
2898 cx.foreground().run_until_parked();
2899 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2900 assert_eq!(
2901 *events.borrow(),
2902 &[
2903 language::Event::DirtyChanged,
2904 language::Event::FileHandleChanged
2905 ]
2906 );
2907
2908    // When a file is already dirty when deleted, we don't emit another `DirtyChanged` event.
2909 let events = Rc::new(RefCell::new(Vec::new()));
2910 let buffer3 = project
2911 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2912 .await
2913 .unwrap();
2914 buffer3.update(cx, |_, cx| {
2915 cx.subscribe(&buffer3, {
2916 let events = events.clone();
2917 move |_, _, event, _| events.borrow_mut().push(event.clone())
2918 })
2919 .detach();
2920 });
2921
2922 buffer3.update(cx, |buffer, cx| {
2923 buffer.edit([(0..0, "x")], None, cx);
2924 });
2925 events.borrow_mut().clear();
2926 fs.remove_file("/dir/file3".as_ref(), Default::default())
2927 .await
2928 .unwrap();
2929 cx.foreground().run_until_parked();
2930 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2931 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2932}
2933
2934#[gpui::test]
2935async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2936 init_test(cx);
2937
2938 let initial_contents = "aaa\nbbbbb\nc\n";
2939 let fs = FakeFs::new(cx.background());
2940 fs.insert_tree(
2941 "/dir",
2942 json!({
2943 "the-file": initial_contents,
2944 }),
2945 )
2946 .await;
2947 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2948 let buffer = project
2949 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2950 .await
2951 .unwrap();
2952
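    // Place an anchor at column 1 of each of the first three rows so we can check how
    // they move when the file is reloaded from disk.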
2953 let anchors = (0..3)
2954 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2955 .collect::<Vec<_>>();
2956
2957 // Change the file on disk, adding two new lines of text, and removing
2958 // one line.
2959 buffer.read_with(cx, |buffer, _| {
2960 assert!(!buffer.is_dirty());
2961 assert!(!buffer.has_conflict());
2962 });
2963 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2964 fs.save(
2965 "/dir/the-file".as_ref(),
2966 &new_contents.into(),
2967 LineEnding::Unix,
2968 )
2969 .await
2970 .unwrap();
2971
2972 // Because the buffer was not modified, it is reloaded from disk. Its
2973 // contents are edited according to the diff between the old and new
2974 // file contents.
2975 cx.foreground().run_until_parked();
2976 buffer.update(cx, |buffer, _| {
2977 assert_eq!(buffer.text(), new_contents);
2978 assert!(!buffer.is_dirty());
2979 assert!(!buffer.has_conflict());
2980
2981 let anchor_positions = anchors
2982 .iter()
2983 .map(|anchor| anchor.to_point(&*buffer))
2984 .collect::<Vec<_>>();
2985 assert_eq!(
2986 anchor_positions,
2987 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2988 );
2989 });
2990
2991 // Modify the buffer
2992 buffer.update(cx, |buffer, cx| {
2993 buffer.edit([(0..0, " ")], None, cx);
2994 assert!(buffer.is_dirty());
2995 assert!(!buffer.has_conflict());
2996 });
2997
2998 // Change the file on disk again, adding blank lines to the beginning.
2999 fs.save(
3000 "/dir/the-file".as_ref(),
3001 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3002 LineEnding::Unix,
3003 )
3004 .await
3005 .unwrap();
3006
3007 // Because the buffer is modified, it doesn't reload from disk, but is
3008 // marked as having a conflict.
3009 cx.foreground().run_until_parked();
3010 buffer.read_with(cx, |buffer, _| {
3011 assert!(buffer.has_conflict());
3012 });
3013}
3014
3015#[gpui::test]
3016async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3017 init_test(cx);
3018
3019 let fs = FakeFs::new(cx.background());
3020 fs.insert_tree(
3021 "/dir",
3022 json!({
3023 "file1": "a\nb\nc\n",
3024 "file2": "one\r\ntwo\r\nthree\r\n",
3025 }),
3026 )
3027 .await;
3028
3029 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3030 let buffer1 = project
3031 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3032 .await
3033 .unwrap();
3034 let buffer2 = project
3035 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3036 .await
3037 .unwrap();
3038
3039 buffer1.read_with(cx, |buffer, _| {
3040 assert_eq!(buffer.text(), "a\nb\nc\n");
3041 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3042 });
3043 buffer2.read_with(cx, |buffer, _| {
3044 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3045 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3046 });
3047
3048    // Change a file's line endings on disk from Unix to Windows. The buffer's
3049    // state updates correctly.
3050 fs.save(
3051 "/dir/file1".as_ref(),
3052 &"aaa\nb\nc\n".into(),
3053 LineEnding::Windows,
3054 )
3055 .await
3056 .unwrap();
3057 cx.foreground().run_until_parked();
3058 buffer1.read_with(cx, |buffer, _| {
3059 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3060 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3061 });
3062
3063 // Save a file with windows line endings. The file is written correctly.
3064 buffer2.update(cx, |buffer, cx| {
3065 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3066 });
3067 project
3068 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3069 .await
3070 .unwrap();
3071 assert_eq!(
3072 fs.load("/dir/file2".as_ref()).await.unwrap(),
3073 "one\r\ntwo\r\nthree\r\nfour\r\n",
3074 );
3075}
3076
3077#[gpui::test]
3078async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3079 init_test(cx);
3080
3081 let fs = FakeFs::new(cx.background());
3082 fs.insert_tree(
3083 "/the-dir",
3084 json!({
3085 "a.rs": "
3086 fn foo(mut v: Vec<usize>) {
3087 for x in &v {
3088 v.push(1);
3089 }
3090 }
3091 "
3092 .unindent(),
3093 }),
3094 )
3095 .await;
3096
3097 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3098 let buffer = project
3099 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3100 .await
3101 .unwrap();
3102
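    // Publish diagnostics whose `related_information` entries point back and forth between
    // each primary diagnostic and its hints, so that they are grouped together.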
3103 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3104 let message = lsp::PublishDiagnosticsParams {
3105 uri: buffer_uri.clone(),
3106 diagnostics: vec![
3107 lsp::Diagnostic {
3108 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3109 severity: Some(DiagnosticSeverity::WARNING),
3110 message: "error 1".to_string(),
3111 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3112 location: lsp::Location {
3113 uri: buffer_uri.clone(),
3114 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3115 },
3116 message: "error 1 hint 1".to_string(),
3117 }]),
3118 ..Default::default()
3119 },
3120 lsp::Diagnostic {
3121 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3122 severity: Some(DiagnosticSeverity::HINT),
3123 message: "error 1 hint 1".to_string(),
3124 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3125 location: lsp::Location {
3126 uri: buffer_uri.clone(),
3127 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3128 },
3129 message: "original diagnostic".to_string(),
3130 }]),
3131 ..Default::default()
3132 },
3133 lsp::Diagnostic {
3134 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3135 severity: Some(DiagnosticSeverity::ERROR),
3136 message: "error 2".to_string(),
3137 related_information: Some(vec![
3138 lsp::DiagnosticRelatedInformation {
3139 location: lsp::Location {
3140 uri: buffer_uri.clone(),
3141 range: lsp::Range::new(
3142 lsp::Position::new(1, 13),
3143 lsp::Position::new(1, 15),
3144 ),
3145 },
3146 message: "error 2 hint 1".to_string(),
3147 },
3148 lsp::DiagnosticRelatedInformation {
3149 location: lsp::Location {
3150 uri: buffer_uri.clone(),
3151 range: lsp::Range::new(
3152 lsp::Position::new(1, 13),
3153 lsp::Position::new(1, 15),
3154 ),
3155 },
3156 message: "error 2 hint 2".to_string(),
3157 },
3158 ]),
3159 ..Default::default()
3160 },
3161 lsp::Diagnostic {
3162 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3163 severity: Some(DiagnosticSeverity::HINT),
3164 message: "error 2 hint 1".to_string(),
3165 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3166 location: lsp::Location {
3167 uri: buffer_uri.clone(),
3168 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3169 },
3170 message: "original diagnostic".to_string(),
3171 }]),
3172 ..Default::default()
3173 },
3174 lsp::Diagnostic {
3175 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3176 severity: Some(DiagnosticSeverity::HINT),
3177 message: "error 2 hint 2".to_string(),
3178 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3179 location: lsp::Location {
3180 uri: buffer_uri,
3181 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3182 },
3183 message: "original diagnostic".to_string(),
3184 }]),
3185 ..Default::default()
3186 },
3187 ],
3188 version: None,
3189 };
3190
3191 project
3192 .update(cx, |p, cx| {
3193 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3194 })
3195 .unwrap();
3196 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3197
3198 assert_eq!(
3199 buffer
3200 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3201 .collect::<Vec<_>>(),
3202 &[
3203 DiagnosticEntry {
3204 range: Point::new(1, 8)..Point::new(1, 9),
3205 diagnostic: Diagnostic {
3206 severity: DiagnosticSeverity::WARNING,
3207 message: "error 1".to_string(),
3208 group_id: 1,
3209 is_primary: true,
3210 ..Default::default()
3211 }
3212 },
3213 DiagnosticEntry {
3214 range: Point::new(1, 8)..Point::new(1, 9),
3215 diagnostic: Diagnostic {
3216 severity: DiagnosticSeverity::HINT,
3217 message: "error 1 hint 1".to_string(),
3218 group_id: 1,
3219 is_primary: false,
3220 ..Default::default()
3221 }
3222 },
3223 DiagnosticEntry {
3224 range: Point::new(1, 13)..Point::new(1, 15),
3225 diagnostic: Diagnostic {
3226 severity: DiagnosticSeverity::HINT,
3227 message: "error 2 hint 1".to_string(),
3228 group_id: 0,
3229 is_primary: false,
3230 ..Default::default()
3231 }
3232 },
3233 DiagnosticEntry {
3234 range: Point::new(1, 13)..Point::new(1, 15),
3235 diagnostic: Diagnostic {
3236 severity: DiagnosticSeverity::HINT,
3237 message: "error 2 hint 2".to_string(),
3238 group_id: 0,
3239 is_primary: false,
3240 ..Default::default()
3241 }
3242 },
3243 DiagnosticEntry {
3244 range: Point::new(2, 8)..Point::new(2, 17),
3245 diagnostic: Diagnostic {
3246 severity: DiagnosticSeverity::ERROR,
3247 message: "error 2".to_string(),
3248 group_id: 0,
3249 is_primary: true,
3250 ..Default::default()
3251 }
3252 }
3253 ]
3254 );
3255
3256 assert_eq!(
3257 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3258 &[
3259 DiagnosticEntry {
3260 range: Point::new(1, 13)..Point::new(1, 15),
3261 diagnostic: Diagnostic {
3262 severity: DiagnosticSeverity::HINT,
3263 message: "error 2 hint 1".to_string(),
3264 group_id: 0,
3265 is_primary: false,
3266 ..Default::default()
3267 }
3268 },
3269 DiagnosticEntry {
3270 range: Point::new(1, 13)..Point::new(1, 15),
3271 diagnostic: Diagnostic {
3272 severity: DiagnosticSeverity::HINT,
3273 message: "error 2 hint 2".to_string(),
3274 group_id: 0,
3275 is_primary: false,
3276 ..Default::default()
3277 }
3278 },
3279 DiagnosticEntry {
3280 range: Point::new(2, 8)..Point::new(2, 17),
3281 diagnostic: Diagnostic {
3282 severity: DiagnosticSeverity::ERROR,
3283 message: "error 2".to_string(),
3284 group_id: 0,
3285 is_primary: true,
3286 ..Default::default()
3287 }
3288 }
3289 ]
3290 );
3291
3292 assert_eq!(
3293 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3294 &[
3295 DiagnosticEntry {
3296 range: Point::new(1, 8)..Point::new(1, 9),
3297 diagnostic: Diagnostic {
3298 severity: DiagnosticSeverity::WARNING,
3299 message: "error 1".to_string(),
3300 group_id: 1,
3301 is_primary: true,
3302 ..Default::default()
3303 }
3304 },
3305 DiagnosticEntry {
3306 range: Point::new(1, 8)..Point::new(1, 9),
3307 diagnostic: Diagnostic {
3308 severity: DiagnosticSeverity::HINT,
3309 message: "error 1 hint 1".to_string(),
3310 group_id: 1,
3311 is_primary: false,
3312 ..Default::default()
3313 }
3314 },
3315 ]
3316 );
3317}
3318
3319#[gpui::test]
3320async fn test_rename(cx: &mut gpui::TestAppContext) {
3321 init_test(cx);
3322
3323 let mut language = Language::new(
3324 LanguageConfig {
3325 name: "Rust".into(),
3326 path_suffixes: vec!["rs".to_string()],
3327 ..Default::default()
3328 },
3329 Some(tree_sitter_rust::language()),
3330 );
3331 let mut fake_servers = language
3332 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3333 capabilities: lsp::ServerCapabilities {
3334 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3335 prepare_provider: Some(true),
3336 work_done_progress_options: Default::default(),
3337 })),
3338 ..Default::default()
3339 },
3340 ..Default::default()
3341 }))
3342 .await;
3343
3344 let fs = FakeFs::new(cx.background());
3345 fs.insert_tree(
3346 "/dir",
3347 json!({
3348 "one.rs": "const ONE: usize = 1;",
3349 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3350 }),
3351 )
3352 .await;
3353
3354 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3355 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3356 let buffer = project
3357 .update(cx, |project, cx| {
3358 project.open_local_buffer("/dir/one.rs", cx)
3359 })
3360 .await
3361 .unwrap();
3362
3363 let fake_server = fake_servers.next().await.unwrap();
3364
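    // Prepare the rename at the position of `ONE`. The server responds with the range of
    // the symbol, which the project converts into buffer offsets.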
3365 let response = project.update(cx, |project, cx| {
3366 project.prepare_rename(buffer.clone(), 7, cx)
3367 });
3368 fake_server
3369 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3370 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3371 assert_eq!(params.position, lsp::Position::new(0, 7));
3372 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3373 lsp::Position::new(0, 6),
3374 lsp::Position::new(0, 9),
3375 ))))
3376 })
3377 .next()
3378 .await
3379 .unwrap();
3380 let range = response.await.unwrap().unwrap();
3381 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3382 assert_eq!(range, 6..9);
3383
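    // Perform the rename. The server's workspace edit touches both `one.rs` and `two.rs`,
    // so the resulting transaction should contain both buffers.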
3384 let response = project.update(cx, |project, cx| {
3385 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3386 });
3387 fake_server
3388 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3389 assert_eq!(
3390 params.text_document_position.text_document.uri.as_str(),
3391 "file:///dir/one.rs"
3392 );
3393 assert_eq!(
3394 params.text_document_position.position,
3395 lsp::Position::new(0, 7)
3396 );
3397 assert_eq!(params.new_name, "THREE");
3398 Ok(Some(lsp::WorkspaceEdit {
3399 changes: Some(
3400 [
3401 (
3402 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3403 vec![lsp::TextEdit::new(
3404 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3405 "THREE".to_string(),
3406 )],
3407 ),
3408 (
3409 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3410 vec![
3411 lsp::TextEdit::new(
3412 lsp::Range::new(
3413 lsp::Position::new(0, 24),
3414 lsp::Position::new(0, 27),
3415 ),
3416 "THREE".to_string(),
3417 ),
3418 lsp::TextEdit::new(
3419 lsp::Range::new(
3420 lsp::Position::new(0, 35),
3421 lsp::Position::new(0, 38),
3422 ),
3423 "THREE".to_string(),
3424 ),
3425 ],
3426 ),
3427 ]
3428 .into_iter()
3429 .collect(),
3430 ),
3431 ..Default::default()
3432 }))
3433 })
3434 .next()
3435 .await
3436 .unwrap();
3437 let mut transaction = response.await.unwrap().0;
3438 assert_eq!(transaction.len(), 2);
3439 assert_eq!(
3440 transaction
3441 .remove_entry(&buffer)
3442 .unwrap()
3443 .0
3444 .read_with(cx, |buffer, _| buffer.text()),
3445 "const THREE: usize = 1;"
3446 );
3447 assert_eq!(
3448 transaction
3449 .into_keys()
3450 .next()
3451 .unwrap()
3452 .read_with(cx, |buffer, _| buffer.text()),
3453 "const TWO: usize = one::THREE + one::THREE;"
3454 );
3455}
3456
3457#[gpui::test]
3458async fn test_search(cx: &mut gpui::TestAppContext) {
3459 init_test(cx);
3460
3461 let fs = FakeFs::new(cx.background());
3462 fs.insert_tree(
3463 "/dir",
3464 json!({
3465 "one.rs": "const ONE: usize = 1;",
3466 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3467 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3468 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3469 }),
3470 )
3471 .await;
3472 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3473 assert_eq!(
3474 search(
3475 &project,
3476 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3477 cx
3478 )
3479 .await
3480 .unwrap(),
3481 HashMap::from_iter([
3482 ("two.rs".to_string(), vec![6..9]),
3483 ("three.rs".to_string(), vec![37..40])
3484 ])
3485 );
3486
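    // Modify an open buffer so that it no longer matches what is on disk; the search
    // should reflect the buffer's in-memory contents for `four.rs`.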
3487 let buffer_4 = project
3488 .update(cx, |project, cx| {
3489 project.open_local_buffer("/dir/four.rs", cx)
3490 })
3491 .await
3492 .unwrap();
3493 buffer_4.update(cx, |buffer, cx| {
3494 let text = "two::TWO";
3495 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3496 });
3497
3498 assert_eq!(
3499 search(
3500 &project,
3501 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3502 cx
3503 )
3504 .await
3505 .unwrap(),
3506 HashMap::from_iter([
3507 ("two.rs".to_string(), vec![6..9]),
3508 ("three.rs".to_string(), vec![37..40]),
3509 ("four.rs".to_string(), vec![25..28, 36..39])
3510 ])
3511 );
3512}
3513
3514#[gpui::test]
3515async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3516 init_test(cx);
3517
3518 let search_query = "file";
3519
3520 let fs = FakeFs::new(cx.background());
3521 fs.insert_tree(
3522 "/dir",
3523 json!({
3524 "one.rs": r#"// Rust file one"#,
3525 "one.ts": r#"// TypeScript file one"#,
3526 "two.rs": r#"// Rust file two"#,
3527 "two.ts": r#"// TypeScript file two"#,
3528 }),
3529 )
3530 .await;
3531 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3532
3533 assert!(
3534 search(
3535 &project,
3536 SearchQuery::text(
3537 search_query,
3538 false,
3539 true,
3540 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3541 Vec::new()
3542 ),
3543 cx
3544 )
3545 .await
3546 .unwrap()
3547 .is_empty(),
3548 "If no inclusions match, no files should be returned"
3549 );
3550
3551 assert_eq!(
3552 search(
3553 &project,
3554 SearchQuery::text(
3555 search_query,
3556 false,
3557 true,
3558 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3559 Vec::new()
3560 ),
3561 cx
3562 )
3563 .await
3564 .unwrap(),
3565 HashMap::from_iter([
3566 ("one.rs".to_string(), vec![8..12]),
3567 ("two.rs".to_string(), vec![8..12]),
3568 ]),
3569 "Rust only search should give only Rust files"
3570 );
3571
3572 assert_eq!(
3573 search(
3574 &project,
3575 SearchQuery::text(
3576 search_query,
3577 false,
3578 true,
3579 vec![
3580 Glob::new("*.ts").unwrap().compile_matcher(),
3581 Glob::new("*.odd").unwrap().compile_matcher(),
3582 ],
3583 Vec::new()
3584 ),
3585 cx
3586 )
3587 .await
3588 .unwrap(),
3589 HashMap::from_iter([
3590 ("one.ts".to_string(), vec![14..18]),
3591 ("two.ts".to_string(), vec![14..18]),
3592 ]),
3593 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3594 );
3595
3596 assert_eq!(
3597 search(
3598 &project,
3599 SearchQuery::text(
3600 search_query,
3601 false,
3602 true,
3603 vec![
3604 Glob::new("*.rs").unwrap().compile_matcher(),
3605 Glob::new("*.ts").unwrap().compile_matcher(),
3606 Glob::new("*.odd").unwrap().compile_matcher(),
3607 ],
3608 Vec::new()
3609 ),
3610 cx
3611 )
3612 .await
3613 .unwrap(),
3614 HashMap::from_iter([
3615 ("one.rs".to_string(), vec![8..12]),
3616 ("one.ts".to_string(), vec![14..18]),
3617 ("two.rs".to_string(), vec![8..12]),
3618 ("two.ts".to_string(), vec![14..18]),
3619 ]),
3620        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3621 );
3622}
3623
3624#[gpui::test]
3625async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3626 init_test(cx);
3627
3628 let search_query = "file";
3629
3630 let fs = FakeFs::new(cx.background());
3631 fs.insert_tree(
3632 "/dir",
3633 json!({
3634 "one.rs": r#"// Rust file one"#,
3635 "one.ts": r#"// TypeScript file one"#,
3636 "two.rs": r#"// Rust file two"#,
3637 "two.ts": r#"// TypeScript file two"#,
3638 }),
3639 )
3640 .await;
3641 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3642
3643 assert_eq!(
3644 search(
3645 &project,
3646 SearchQuery::text(
3647 search_query,
3648 false,
3649 true,
3650 Vec::new(),
3651 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3652 ),
3653 cx
3654 )
3655 .await
3656 .unwrap(),
3657 HashMap::from_iter([
3658 ("one.rs".to_string(), vec![8..12]),
3659 ("one.ts".to_string(), vec![14..18]),
3660 ("two.rs".to_string(), vec![8..12]),
3661 ("two.ts".to_string(), vec![14..18]),
3662 ]),
3663 "If no exclusions match, all files should be returned"
3664 );
3665
3666 assert_eq!(
3667 search(
3668 &project,
3669 SearchQuery::text(
3670 search_query,
3671 false,
3672 true,
3673 Vec::new(),
3674 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3675 ),
3676 cx
3677 )
3678 .await
3679 .unwrap(),
3680 HashMap::from_iter([
3681 ("one.ts".to_string(), vec![14..18]),
3682 ("two.ts".to_string(), vec![14..18]),
3683 ]),
3684 "Rust exclusion search should give only TypeScript files"
3685 );
3686
3687 assert_eq!(
3688 search(
3689 &project,
3690 SearchQuery::text(
3691 search_query,
3692 false,
3693 true,
3694 Vec::new(),
3695 vec![
3696 Glob::new("*.ts").unwrap().compile_matcher(),
3697 Glob::new("*.odd").unwrap().compile_matcher(),
3698 ],
3699 ),
3700 cx
3701 )
3702 .await
3703 .unwrap(),
3704 HashMap::from_iter([
3705 ("one.rs".to_string(), vec![8..12]),
3706 ("two.rs".to_string(), vec![8..12]),
3707 ]),
3708 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3709 );
3710
3711 assert!(
3712 search(
3713 &project,
3714 SearchQuery::text(
3715 search_query,
3716 false,
3717 true,
3718 Vec::new(),
3719 vec![
3720 Glob::new("*.rs").unwrap().compile_matcher(),
3721 Glob::new("*.ts").unwrap().compile_matcher(),
3722 Glob::new("*.odd").unwrap().compile_matcher(),
3723 ],
3724 ),
3725 cx
3726 )
3727 .await
3728 .unwrap().is_empty(),
3729        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
3730 );
3731}
3732
3733#[gpui::test]
3734async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3735 init_test(cx);
3736
3737 let search_query = "file";
3738
3739 let fs = FakeFs::new(cx.background());
3740 fs.insert_tree(
3741 "/dir",
3742 json!({
3743 "one.rs": r#"// Rust file one"#,
3744 "one.ts": r#"// TypeScript file one"#,
3745 "two.rs": r#"// Rust file two"#,
3746 "two.ts": r#"// TypeScript file two"#,
3747 }),
3748 )
3749 .await;
3750 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3751
3752 assert!(
3753 search(
3754 &project,
3755 SearchQuery::text(
3756 search_query,
3757 false,
3758 true,
3759 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3760 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3761 ),
3762 cx
3763 )
3764 .await
3765 .unwrap()
3766 .is_empty(),
3767        "If neither the inclusions nor the exclusions match anything, no files should be returned"
3768 );
3769
3770 assert!(
3771 search(
3772 &project,
3773 SearchQuery::text(
3774 search_query,
3775 false,
3776 true,
3777 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3778 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3779 ),
3780 cx
3781 )
3782 .await
3783 .unwrap()
3784 .is_empty(),
3785        "If the TypeScript inclusions and exclusions both match, the exclusions should win and no files should be returned."
3786 );
3787
3788 assert!(
3789 search(
3790 &project,
3791 SearchQuery::text(
3792 search_query,
3793 false,
3794 true,
3795 vec![
3796 Glob::new("*.ts").unwrap().compile_matcher(),
3797 Glob::new("*.odd").unwrap().compile_matcher()
3798 ],
3799 vec![
3800 Glob::new("*.ts").unwrap().compile_matcher(),
3801 Glob::new("*.odd").unwrap().compile_matcher()
3802 ],
3803 ),
3804 cx
3805 )
3806 .await
3807 .unwrap()
3808 .is_empty(),
3809        "Adding non-matching patterns to the inclusions and exclusions should not change the result."
3810 );
3811
3812 assert_eq!(
3813 search(
3814 &project,
3815 SearchQuery::text(
3816 search_query,
3817 false,
3818 true,
3819 vec![
3820 Glob::new("*.ts").unwrap().compile_matcher(),
3821 Glob::new("*.odd").unwrap().compile_matcher()
3822 ],
3823 vec![
3824 Glob::new("*.rs").unwrap().compile_matcher(),
3825 Glob::new("*.odd").unwrap().compile_matcher()
3826 ],
3827 ),
3828 cx
3829 )
3830 .await
3831 .unwrap(),
3832 HashMap::from_iter([
3833 ("one.ts".to_string(), vec![14..18]),
3834 ("two.ts".to_string(), vec![14..18]),
3835 ]),
3836 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3837 );
3838}
3839
3840async fn search(
3841 project: &ModelHandle<Project>,
3842 query: SearchQuery,
3843 cx: &mut gpui::TestAppContext,
3844) -> Result<HashMap<String, Vec<Range<usize>>>> {
3845 let results = project
3846 .update(cx, |project, cx| project.search(query, cx))
3847 .await?;
3848
3849 Ok(results
3850 .into_iter()
3851 .map(|(buffer, ranges)| {
3852 buffer.read_with(cx, |buffer, _| {
3853 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3854 let ranges = ranges
3855 .into_iter()
3856 .map(|range| range.to_offset(buffer))
3857 .collect::<Vec<_>>();
3858 (path, ranges)
3859 })
3860 })
3861 .collect())
3862}
3863
3864fn init_test(cx: &mut gpui::TestAppContext) {
3865 cx.foreground().forbid_parking();
3866
3867 cx.update(|cx| {
3868 cx.set_global(SettingsStore::test(cx));
3869 language::init(cx);
3870 Project::init_settings(cx);
3871 });
3872}