1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use globset::Glob;
5use gpui::{executor::Deterministic, test::subscribe, AppContext};
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 init_test(cx);
30 cx.foreground().allow_parking();
31
32 let dir = temp_tree(json!({
33 "root": {
34 "apple": "",
35 "banana": {
36 "carrot": {
37 "date": "",
38 "endive": "",
39 }
40 },
41 "fennel": {
42 "grape": "",
43 }
44 }
45 }));
46
47 let root_link_path = dir.path().join("root_link");
48 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
49 unix::fs::symlink(
50 &dir.path().join("root/fennel"),
51 &dir.path().join("root/finnochio"),
52 )
53 .unwrap();
54
55 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
56 project.read_with(cx, |project, cx| {
57 let tree = project.worktrees(cx).next().unwrap().read(cx);
58 assert_eq!(tree.file_count(), 5);
59 assert_eq!(
60 tree.inode_for_path("fennel/grape"),
61 tree.inode_for_path("finnochio/grape")
62 );
63 });
64}
65
66#[gpui::test]
67async fn test_managing_project_specific_settings(
68 deterministic: Arc<Deterministic>,
69 cx: &mut gpui::TestAppContext,
70) {
71 init_test(cx);
72
73 let fs = FakeFs::new(cx.background());
74 fs.insert_tree(
75 "/the-root",
76 json!({
77 ".zed": {
78 "settings.json": r#"{ "tab_size": 8 }"#
79 },
80 "a": {
81 "a.rs": "fn a() {\n A\n}"
82 },
83 "b": {
84 ".zed": {
85 "settings.json": r#"{ "tab_size": 2 }"#
86 },
87 "b.rs": "fn b() {\n B\n}"
88 }
89 }),
90 )
91 .await;
92
93 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
94 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
95
96 deterministic.run_until_parked();
97 cx.read(|cx| {
98 let tree = worktree.read(cx);
99
100 let settings_a = language_settings(
101 None,
102 Some(
103 &(File::for_entry(
104 tree.entry_for_path("a/a.rs").unwrap().clone(),
105 worktree.clone(),
106 ) as _),
107 ),
108 cx,
109 );
110 let settings_b = language_settings(
111 None,
112 Some(
113 &(File::for_entry(
114 tree.entry_for_path("b/b.rs").unwrap().clone(),
115 worktree.clone(),
116 ) as _),
117 ),
118 cx,
119 );
120
121 assert_eq!(settings_a.tab_size.get(), 8);
122 assert_eq!(settings_b.tab_size.get(), 2);
123 });
124}
125
126#[gpui::test]
127async fn test_managing_language_servers(
128 deterministic: Arc<Deterministic>,
129 cx: &mut gpui::TestAppContext,
130) {
131 init_test(cx);
132
133 let mut rust_language = Language::new(
134 LanguageConfig {
135 name: "Rust".into(),
136 path_suffixes: vec!["rs".to_string()],
137 ..Default::default()
138 },
139 Some(tree_sitter_rust::language()),
140 );
141 let mut json_language = Language::new(
142 LanguageConfig {
143 name: "JSON".into(),
144 path_suffixes: vec!["json".to_string()],
145 ..Default::default()
146 },
147 None,
148 );
149 let mut fake_rust_servers = rust_language
150 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
151 name: "the-rust-language-server",
152 capabilities: lsp::ServerCapabilities {
153 completion_provider: Some(lsp::CompletionOptions {
154 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
155 ..Default::default()
156 }),
157 ..Default::default()
158 },
159 ..Default::default()
160 }))
161 .await;
162 let mut fake_json_servers = json_language
163 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
164 name: "the-json-language-server",
165 capabilities: lsp::ServerCapabilities {
166 completion_provider: Some(lsp::CompletionOptions {
167 trigger_characters: Some(vec![":".to_string()]),
168 ..Default::default()
169 }),
170 ..Default::default()
171 },
172 ..Default::default()
173 }))
174 .await;
175
176 let fs = FakeFs::new(cx.background());
177 fs.insert_tree(
178 "/the-root",
179 json!({
180 "test.rs": "const A: i32 = 1;",
181 "test2.rs": "",
182 "Cargo.toml": "a = 1",
183 "package.json": "{\"a\": 1}",
184 }),
185 )
186 .await;
187
188 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
189
190 // Open a buffer without an associated language server.
191 let toml_buffer = project
192 .update(cx, |project, cx| {
193 project.open_local_buffer("/the-root/Cargo.toml", cx)
194 })
195 .await
196 .unwrap();
197
198 // Open a buffer with an associated language server before the language for it has been loaded.
199 let rust_buffer = project
200 .update(cx, |project, cx| {
201 project.open_local_buffer("/the-root/test.rs", cx)
202 })
203 .await
204 .unwrap();
205 rust_buffer.read_with(cx, |buffer, _| {
206 assert_eq!(buffer.language().map(|l| l.name()), None);
207 });
208
209 // Now we add the languages to the project, and ensure they get assigned to all
210 // the relevant open buffers.
211 project.update(cx, |project, _| {
212 project.languages.add(Arc::new(json_language));
213 project.languages.add(Arc::new(rust_language));
214 });
215 deterministic.run_until_parked();
216 rust_buffer.read_with(cx, |buffer, _| {
217 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
218 });
219
220 // A server is started up, and it is notified about Rust files.
221 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
222 assert_eq!(
223 fake_rust_server
224 .receive_notification::<lsp::notification::DidOpenTextDocument>()
225 .await
226 .text_document,
227 lsp::TextDocumentItem {
228 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
229 version: 0,
230 text: "const A: i32 = 1;".to_string(),
231 language_id: Default::default()
232 }
233 );
234
235 // The buffer is configured based on the language server's capabilities.
236 rust_buffer.read_with(cx, |buffer, _| {
237 assert_eq!(
238 buffer.completion_triggers(),
239 &[".".to_string(), "::".to_string()]
240 );
241 });
242 toml_buffer.read_with(cx, |buffer, _| {
243 assert!(buffer.completion_triggers().is_empty());
244 });
245
246 // Edit a buffer. The changes are reported to the language server.
247 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
248 assert_eq!(
249 fake_rust_server
250 .receive_notification::<lsp::notification::DidChangeTextDocument>()
251 .await
252 .text_document,
253 lsp::VersionedTextDocumentIdentifier::new(
254 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
255 1
256 )
257 );
258
259 // Open a third buffer with a different associated language server.
260 let json_buffer = project
261 .update(cx, |project, cx| {
262 project.open_local_buffer("/the-root/package.json", cx)
263 })
264 .await
265 .unwrap();
266
267 // A json language server is started up and is only notified about the json buffer.
268 let mut fake_json_server = fake_json_servers.next().await.unwrap();
269 assert_eq!(
270 fake_json_server
271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
272 .await
273 .text_document,
274 lsp::TextDocumentItem {
275 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
276 version: 0,
277 text: "{\"a\": 1}".to_string(),
278 language_id: Default::default()
279 }
280 );
281
282 // This buffer is configured based on the second language server's
283 // capabilities.
284 json_buffer.read_with(cx, |buffer, _| {
285 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
286 });
287
288 // When opening another buffer whose language server is already running,
289 // it is also configured based on the existing language server's capabilities.
290 let rust_buffer2 = project
291 .update(cx, |project, cx| {
292 project.open_local_buffer("/the-root/test2.rs", cx)
293 })
294 .await
295 .unwrap();
296 rust_buffer2.read_with(cx, |buffer, _| {
297 assert_eq!(
298 buffer.completion_triggers(),
299 &[".".to_string(), "::".to_string()]
300 );
301 });
302
303 // Changes are reported only to servers matching the buffer's language.
304 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
305 rust_buffer2.update(cx, |buffer, cx| {
306 buffer.edit([(0..0, "let x = 1;")], None, cx)
307 });
308 assert_eq!(
309 fake_rust_server
310 .receive_notification::<lsp::notification::DidChangeTextDocument>()
311 .await
312 .text_document,
313 lsp::VersionedTextDocumentIdentifier::new(
314 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
315 1
316 )
317 );
318
319 // Save notifications are reported to all servers.
320 project
321 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
322 .await
323 .unwrap();
324 assert_eq!(
325 fake_rust_server
326 .receive_notification::<lsp::notification::DidSaveTextDocument>()
327 .await
328 .text_document,
329 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
330 );
331 assert_eq!(
332 fake_json_server
333 .receive_notification::<lsp::notification::DidSaveTextDocument>()
334 .await
335 .text_document,
336 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
337 );
338
339 // Renames are reported only to servers matching the buffer's language.
340 fs.rename(
341 Path::new("/the-root/test2.rs"),
342 Path::new("/the-root/test3.rs"),
343 Default::default(),
344 )
345 .await
346 .unwrap();
347 assert_eq!(
348 fake_rust_server
349 .receive_notification::<lsp::notification::DidCloseTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
353 );
354 assert_eq!(
355 fake_rust_server
356 .receive_notification::<lsp::notification::DidOpenTextDocument>()
357 .await
358 .text_document,
359 lsp::TextDocumentItem {
360 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
361 version: 0,
362 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
363 language_id: Default::default()
364 },
365 );
366
367 rust_buffer2.update(cx, |buffer, cx| {
368 buffer.update_diagnostics(
369 LanguageServerId(0),
370 DiagnosticSet::from_sorted_entries(
371 vec![DiagnosticEntry {
372 diagnostic: Default::default(),
373 range: Anchor::MIN..Anchor::MAX,
374 }],
375 &buffer.snapshot(),
376 ),
377 cx,
378 );
379 assert_eq!(
380 buffer
381 .snapshot()
382 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
383 .count(),
384 1
385 );
386 });
387
388 // When the rename changes the extension of the file, the buffer gets closed on the old
389 // language server and gets opened on the new one.
390 fs.rename(
391 Path::new("/the-root/test3.rs"),
392 Path::new("/the-root/test3.json"),
393 Default::default(),
394 )
395 .await
396 .unwrap();
397 assert_eq!(
398 fake_rust_server
399 .receive_notification::<lsp::notification::DidCloseTextDocument>()
400 .await
401 .text_document,
402 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
403 );
404 assert_eq!(
405 fake_json_server
406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
407 .await
408 .text_document,
409 lsp::TextDocumentItem {
410 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
411 version: 0,
412 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
413 language_id: Default::default()
414 },
415 );
416
417 // We clear the diagnostics, since the language has changed.
418 rust_buffer2.read_with(cx, |buffer, _| {
419 assert_eq!(
420 buffer
421 .snapshot()
422 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
423 .count(),
424 0
425 );
426 });
427
428 // The renamed file's version resets after changing language server.
429 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
430 assert_eq!(
431 fake_json_server
432 .receive_notification::<lsp::notification::DidChangeTextDocument>()
433 .await
434 .text_document,
435 lsp::VersionedTextDocumentIdentifier::new(
436 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
437 1
438 )
439 );
440
441 // Restart language servers
442 project.update(cx, |project, cx| {
443 project.restart_language_servers_for_buffers(
444 vec![rust_buffer.clone(), json_buffer.clone()],
445 cx,
446 );
447 });
448
449 let mut rust_shutdown_requests = fake_rust_server
450 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
451 let mut json_shutdown_requests = fake_json_server
452 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
453 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
454
455 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
456 let mut fake_json_server = fake_json_servers.next().await.unwrap();
457
458 // Ensure rust document is reopened in new rust language server
459 assert_eq!(
460 fake_rust_server
461 .receive_notification::<lsp::notification::DidOpenTextDocument>()
462 .await
463 .text_document,
464 lsp::TextDocumentItem {
465 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
466 version: 0,
467 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
468 language_id: Default::default()
469 }
470 );
471
472 // Ensure json documents are reopened in new json language server
473 assert_set_eq!(
474 [
475 fake_json_server
476 .receive_notification::<lsp::notification::DidOpenTextDocument>()
477 .await
478 .text_document,
479 fake_json_server
480 .receive_notification::<lsp::notification::DidOpenTextDocument>()
481 .await
482 .text_document,
483 ],
484 [
485 lsp::TextDocumentItem {
486 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
487 version: 0,
488 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
489 language_id: Default::default()
490 },
491 lsp::TextDocumentItem {
492 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
493 version: 0,
494 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
495 language_id: Default::default()
496 }
497 ]
498 );
499
500 // Close notifications are reported only to servers matching the buffer's language.
501 cx.update(|_| drop(json_buffer));
502 let close_message = lsp::DidCloseTextDocumentParams {
503 text_document: lsp::TextDocumentIdentifier::new(
504 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
505 ),
506 };
507 assert_eq!(
508 fake_json_server
509 .receive_notification::<lsp::notification::DidCloseTextDocument>()
510 .await,
511 close_message,
512 );
513}
514
515#[gpui::test]
516async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
517 init_test(cx);
518
519 let mut language = Language::new(
520 LanguageConfig {
521 name: "Rust".into(),
522 path_suffixes: vec!["rs".to_string()],
523 ..Default::default()
524 },
525 Some(tree_sitter_rust::language()),
526 );
527 let mut fake_servers = language
528 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
529 name: "the-language-server",
530 ..Default::default()
531 }))
532 .await;
533
534 let fs = FakeFs::new(cx.background());
535 fs.insert_tree(
536 "/the-root",
537 json!({
538 ".gitignore": "target\n",
539 "src": {
540 "a.rs": "",
541 "b.rs": "",
542 },
543 "target": {
544 "x": {
545 "out": {
546 "x.rs": ""
547 }
548 },
549 "y": {
550 "out": {
551 "y.rs": "",
552 }
553 },
554 "z": {
555 "out": {
556 "z.rs": ""
557 }
558 }
559 }
560 }),
561 )
562 .await;
563
564 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
565 project.update(cx, |project, _| {
566 project.languages.add(Arc::new(language));
567 });
568 cx.foreground().run_until_parked();
569
570 // Start the language server by opening a buffer with a compatible file extension.
571 let _buffer = project
572 .update(cx, |project, cx| {
573 project.open_local_buffer("/the-root/src/a.rs", cx)
574 })
575 .await
576 .unwrap();
577
578 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
579 project.read_with(cx, |project, cx| {
580 let worktree = project.worktrees(cx).next().unwrap();
581 assert_eq!(
582 worktree
583 .read(cx)
584 .snapshot()
585 .entries(true)
586 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
587 .collect::<Vec<_>>(),
588 &[
589 (Path::new(""), false),
590 (Path::new(".gitignore"), false),
591 (Path::new("src"), false),
592 (Path::new("src/a.rs"), false),
593 (Path::new("src/b.rs"), false),
594 (Path::new("target"), true),
595 ]
596 );
597 });
598
599 // Keep track of the FS events reported to the language server.
600 let fake_server = fake_servers.next().await.unwrap();
601 let file_changes = Arc::new(Mutex::new(Vec::new()));
602 fake_server
603 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
604 registrations: vec![lsp::Registration {
605 id: Default::default(),
606 method: "workspace/didChangeWatchedFiles".to_string(),
607 register_options: serde_json::to_value(
608 lsp::DidChangeWatchedFilesRegistrationOptions {
609 watchers: vec![
610 lsp::FileSystemWatcher {
611 glob_pattern: lsp::GlobPattern::String(
612 "/the-root/src/*.{rs,c}".to_string(),
613 ),
614 kind: None,
615 },
616 lsp::FileSystemWatcher {
617 glob_pattern: lsp::GlobPattern::String(
618 "/the-root/target/y/**/*.rs".to_string(),
619 ),
620 kind: None,
621 },
622 ],
623 },
624 )
625 .ok(),
626 }],
627 })
628 .await
629 .unwrap();
630 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
631 let file_changes = file_changes.clone();
632 move |params, _| {
633 let mut file_changes = file_changes.lock();
634 file_changes.extend(params.changes);
635 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
636 }
637 });
638
639 cx.foreground().run_until_parked();
640 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
641
642 // Now the language server has asked us to watch an ignored directory path,
643 // so we recursively load it.
644 project.read_with(cx, |project, cx| {
645 let worktree = project.worktrees(cx).next().unwrap();
646 assert_eq!(
647 worktree
648 .read(cx)
649 .snapshot()
650 .entries(true)
651 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
652 .collect::<Vec<_>>(),
653 &[
654 (Path::new(""), false),
655 (Path::new(".gitignore"), false),
656 (Path::new("src"), false),
657 (Path::new("src/a.rs"), false),
658 (Path::new("src/b.rs"), false),
659 (Path::new("target"), true),
660 (Path::new("target/x"), true),
661 (Path::new("target/y"), true),
662 (Path::new("target/y/out"), true),
663 (Path::new("target/y/out/y.rs"), true),
664 (Path::new("target/z"), true),
665 ]
666 );
667 });
668
669 // Perform some file system mutations, two of which match the watched patterns,
670 // and one of which does not.
671 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
672 .await
673 .unwrap();
674 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
675 .await
676 .unwrap();
677 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
678 .await
679 .unwrap();
680 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
681 .await
682 .unwrap();
683 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
684 .await
685 .unwrap();
686
687 // The language server receives events for the FS mutations that match its watch patterns.
688 cx.foreground().run_until_parked();
689 assert_eq!(
690 &*file_changes.lock(),
691 &[
692 lsp::FileEvent {
693 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
694 typ: lsp::FileChangeType::DELETED,
695 },
696 lsp::FileEvent {
697 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
698 typ: lsp::FileChangeType::CREATED,
699 },
700 lsp::FileEvent {
701 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
702 typ: lsp::FileChangeType::CREATED,
703 },
704 ]
705 );
706}
707
708#[gpui::test]
709async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
710 init_test(cx);
711
712 let fs = FakeFs::new(cx.background());
713 fs.insert_tree(
714 "/dir",
715 json!({
716 "a.rs": "let a = 1;",
717 "b.rs": "let b = 2;"
718 }),
719 )
720 .await;
721
722 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
723
724 let buffer_a = project
725 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
726 .await
727 .unwrap();
728 let buffer_b = project
729 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
730 .await
731 .unwrap();
732
733 project.update(cx, |project, cx| {
734 project
735 .update_diagnostics(
736 LanguageServerId(0),
737 lsp::PublishDiagnosticsParams {
738 uri: Url::from_file_path("/dir/a.rs").unwrap(),
739 version: None,
740 diagnostics: vec![lsp::Diagnostic {
741 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
742 severity: Some(lsp::DiagnosticSeverity::ERROR),
743 message: "error 1".to_string(),
744 ..Default::default()
745 }],
746 },
747 &[],
748 cx,
749 )
750 .unwrap();
751 project
752 .update_diagnostics(
753 LanguageServerId(0),
754 lsp::PublishDiagnosticsParams {
755 uri: Url::from_file_path("/dir/b.rs").unwrap(),
756 version: None,
757 diagnostics: vec![lsp::Diagnostic {
758 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
759 severity: Some(lsp::DiagnosticSeverity::WARNING),
760 message: "error 2".to_string(),
761 ..Default::default()
762 }],
763 },
764 &[],
765 cx,
766 )
767 .unwrap();
768 });
769
770 buffer_a.read_with(cx, |buffer, _| {
771 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
772 assert_eq!(
773 chunks
774 .iter()
775 .map(|(s, d)| (s.as_str(), *d))
776 .collect::<Vec<_>>(),
777 &[
778 ("let ", None),
779 ("a", Some(DiagnosticSeverity::ERROR)),
780 (" = 1;", None),
781 ]
782 );
783 });
784 buffer_b.read_with(cx, |buffer, _| {
785 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
786 assert_eq!(
787 chunks
788 .iter()
789 .map(|(s, d)| (s.as_str(), *d))
790 .collect::<Vec<_>>(),
791 &[
792 ("let ", None),
793 ("b", Some(DiagnosticSeverity::WARNING)),
794 (" = 2;", None),
795 ]
796 );
797 });
798}
799
800#[gpui::test]
801async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
802 init_test(cx);
803
804 let fs = FakeFs::new(cx.background());
805 fs.insert_tree(
806 "/root",
807 json!({
808 "dir": {
809 "a.rs": "let a = 1;",
810 },
811 "other.rs": "let b = c;"
812 }),
813 )
814 .await;
815
816 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
817
818 let (worktree, _) = project
819 .update(cx, |project, cx| {
820 project.find_or_create_local_worktree("/root/other.rs", false, cx)
821 })
822 .await
823 .unwrap();
824 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
825
826 project.update(cx, |project, cx| {
827 project
828 .update_diagnostics(
829 LanguageServerId(0),
830 lsp::PublishDiagnosticsParams {
831 uri: Url::from_file_path("/root/other.rs").unwrap(),
832 version: None,
833 diagnostics: vec![lsp::Diagnostic {
834 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
835 severity: Some(lsp::DiagnosticSeverity::ERROR),
836 message: "unknown variable 'c'".to_string(),
837 ..Default::default()
838 }],
839 },
840 &[],
841 cx,
842 )
843 .unwrap();
844 });
845
846 let buffer = project
847 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
848 .await
849 .unwrap();
850 buffer.read_with(cx, |buffer, _| {
851 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
852 assert_eq!(
853 chunks
854 .iter()
855 .map(|(s, d)| (s.as_str(), *d))
856 .collect::<Vec<_>>(),
857 &[
858 ("let b = ", None),
859 ("c", Some(DiagnosticSeverity::ERROR)),
860 (";", None),
861 ]
862 );
863 });
864
865 project.read_with(cx, |project, cx| {
866 assert_eq!(project.diagnostic_summaries(cx).next(), None);
867 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
868 });
869}
870
871#[gpui::test]
872async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
873 init_test(cx);
874
875 let progress_token = "the-progress-token";
876 let mut language = Language::new(
877 LanguageConfig {
878 name: "Rust".into(),
879 path_suffixes: vec!["rs".to_string()],
880 ..Default::default()
881 },
882 Some(tree_sitter_rust::language()),
883 );
884 let mut fake_servers = language
885 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
886 disk_based_diagnostics_progress_token: Some(progress_token.into()),
887 disk_based_diagnostics_sources: vec!["disk".into()],
888 ..Default::default()
889 }))
890 .await;
891
892 let fs = FakeFs::new(cx.background());
893 fs.insert_tree(
894 "/dir",
895 json!({
896 "a.rs": "fn a() { A }",
897 "b.rs": "const y: i32 = 1",
898 }),
899 )
900 .await;
901
902 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
903 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
904 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
905
906 // Cause worktree to start the fake language server
907 let _buffer = project
908 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
909 .await
910 .unwrap();
911
912 let mut events = subscribe(&project, cx);
913
914 let fake_server = fake_servers.next().await.unwrap();
915 assert_eq!(
916 events.next().await.unwrap(),
917 Event::LanguageServerAdded(LanguageServerId(0)),
918 );
919
920 fake_server
921 .start_progress(format!("{}/0", progress_token))
922 .await;
923 assert_eq!(
924 events.next().await.unwrap(),
925 Event::DiskBasedDiagnosticsStarted {
926 language_server_id: LanguageServerId(0),
927 }
928 );
929
930 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
931 uri: Url::from_file_path("/dir/a.rs").unwrap(),
932 version: None,
933 diagnostics: vec![lsp::Diagnostic {
934 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
935 severity: Some(lsp::DiagnosticSeverity::ERROR),
936 message: "undefined variable 'A'".to_string(),
937 ..Default::default()
938 }],
939 });
940 assert_eq!(
941 events.next().await.unwrap(),
942 Event::DiagnosticsUpdated {
943 language_server_id: LanguageServerId(0),
944 path: (worktree_id, Path::new("a.rs")).into()
945 }
946 );
947
948 fake_server.end_progress(format!("{}/0", progress_token));
949 assert_eq!(
950 events.next().await.unwrap(),
951 Event::DiskBasedDiagnosticsFinished {
952 language_server_id: LanguageServerId(0)
953 }
954 );
955
956 let buffer = project
957 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
958 .await
959 .unwrap();
960
961 buffer.read_with(cx, |buffer, _| {
962 let snapshot = buffer.snapshot();
963 let diagnostics = snapshot
964 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
965 .collect::<Vec<_>>();
966 assert_eq!(
967 diagnostics,
968 &[DiagnosticEntry {
969 range: Point::new(0, 9)..Point::new(0, 10),
970 diagnostic: Diagnostic {
971 severity: lsp::DiagnosticSeverity::ERROR,
972 message: "undefined variable 'A'".to_string(),
973 group_id: 0,
974 is_primary: true,
975 ..Default::default()
976 }
977 }]
978 )
979 });
980
981 // Ensure publishing empty diagnostics twice only results in one update event.
982 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
983 uri: Url::from_file_path("/dir/a.rs").unwrap(),
984 version: None,
985 diagnostics: Default::default(),
986 });
987 assert_eq!(
988 events.next().await.unwrap(),
989 Event::DiagnosticsUpdated {
990 language_server_id: LanguageServerId(0),
991 path: (worktree_id, Path::new("a.rs")).into()
992 }
993 );
994
995 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
996 uri: Url::from_file_path("/dir/a.rs").unwrap(),
997 version: None,
998 diagnostics: Default::default(),
999 });
1000 cx.foreground().run_until_parked();
1001 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1002}
1003
1004#[gpui::test]
1005async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1006 init_test(cx);
1007
1008 let progress_token = "the-progress-token";
1009 let mut language = Language::new(
1010 LanguageConfig {
1011 path_suffixes: vec!["rs".to_string()],
1012 ..Default::default()
1013 },
1014 None,
1015 );
1016 let mut fake_servers = language
1017 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1018 disk_based_diagnostics_sources: vec!["disk".into()],
1019 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1020 ..Default::default()
1021 }))
1022 .await;
1023
1024 let fs = FakeFs::new(cx.background());
1025 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1026
1027 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1028 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1029
1030 let buffer = project
1031 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1032 .await
1033 .unwrap();
1034
1035 // Simulate diagnostics starting to update.
1036 let fake_server = fake_servers.next().await.unwrap();
1037 fake_server.start_progress(progress_token).await;
1038
1039 // Restart the server before the diagnostics finish updating.
1040 project.update(cx, |project, cx| {
1041 project.restart_language_servers_for_buffers([buffer], cx);
1042 });
1043 let mut events = subscribe(&project, cx);
1044
1045 // Simulate the newly started server sending more diagnostics.
1046 let fake_server = fake_servers.next().await.unwrap();
1047 assert_eq!(
1048 events.next().await.unwrap(),
1049 Event::LanguageServerAdded(LanguageServerId(1))
1050 );
1051 fake_server.start_progress(progress_token).await;
1052 assert_eq!(
1053 events.next().await.unwrap(),
1054 Event::DiskBasedDiagnosticsStarted {
1055 language_server_id: LanguageServerId(1)
1056 }
1057 );
1058 project.read_with(cx, |project, _| {
1059 assert_eq!(
1060 project
1061 .language_servers_running_disk_based_diagnostics()
1062 .collect::<Vec<_>>(),
1063 [LanguageServerId(1)]
1064 );
1065 });
1066
1067 // All diagnostics are considered done, despite the old server's diagnostic
1068 // task never completing.
1069 fake_server.end_progress(progress_token);
1070 assert_eq!(
1071 events.next().await.unwrap(),
1072 Event::DiskBasedDiagnosticsFinished {
1073 language_server_id: LanguageServerId(1)
1074 }
1075 );
1076 project.read_with(cx, |project, _| {
1077 assert_eq!(
1078 project
1079 .language_servers_running_disk_based_diagnostics()
1080 .collect::<Vec<_>>(),
1081 [LanguageServerId(0); 0]
1082 );
1083 });
1084}
1085
1086#[gpui::test]
1087async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1088 init_test(cx);
1089
1090 let mut language = Language::new(
1091 LanguageConfig {
1092 path_suffixes: vec!["rs".to_string()],
1093 ..Default::default()
1094 },
1095 None,
1096 );
1097 let mut fake_servers = language
1098 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1099 ..Default::default()
1100 }))
1101 .await;
1102
1103 let fs = FakeFs::new(cx.background());
1104 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1105
1106 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1107 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1108
1109 let buffer = project
1110 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1111 .await
1112 .unwrap();
1113
1114 // Publish diagnostics
1115 let fake_server = fake_servers.next().await.unwrap();
1116 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1117 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1118 version: None,
1119 diagnostics: vec![lsp::Diagnostic {
1120 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1121 severity: Some(lsp::DiagnosticSeverity::ERROR),
1122 message: "the message".to_string(),
1123 ..Default::default()
1124 }],
1125 });
1126
1127 cx.foreground().run_until_parked();
1128 buffer.read_with(cx, |buffer, _| {
1129 assert_eq!(
1130 buffer
1131 .snapshot()
1132 .diagnostics_in_range::<_, usize>(0..1, false)
1133 .map(|entry| entry.diagnostic.message.clone())
1134 .collect::<Vec<_>>(),
1135 ["the message".to_string()]
1136 );
1137 });
1138 project.read_with(cx, |project, cx| {
1139 assert_eq!(
1140 project.diagnostic_summary(cx),
1141 DiagnosticSummary {
1142 error_count: 1,
1143 warning_count: 0,
1144 }
1145 );
1146 });
1147
1148 project.update(cx, |project, cx| {
1149 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1150 });
1151
1152 // The diagnostics are cleared.
1153 cx.foreground().run_until_parked();
1154 buffer.read_with(cx, |buffer, _| {
1155 assert_eq!(
1156 buffer
1157 .snapshot()
1158 .diagnostics_in_range::<_, usize>(0..1, false)
1159 .map(|entry| entry.diagnostic.message.clone())
1160 .collect::<Vec<_>>(),
1161 Vec::<String>::new(),
1162 );
1163 });
1164 project.read_with(cx, |project, cx| {
1165 assert_eq!(
1166 project.diagnostic_summary(cx),
1167 DiagnosticSummary {
1168 error_count: 0,
1169 warning_count: 0,
1170 }
1171 );
1172 });
1173}
1174
1175#[gpui::test]
1176async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1177 init_test(cx);
1178
1179 let mut language = Language::new(
1180 LanguageConfig {
1181 path_suffixes: vec!["rs".to_string()],
1182 ..Default::default()
1183 },
1184 None,
1185 );
1186 let mut fake_servers = language
1187 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1188 name: "the-lsp",
1189 ..Default::default()
1190 }))
1191 .await;
1192
1193 let fs = FakeFs::new(cx.background());
1194 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1195
1196 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1197 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1198
1199 let buffer = project
1200 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1201 .await
1202 .unwrap();
1203
1204 // Before restarting the server, report diagnostics with an unknown buffer version.
1205 let fake_server = fake_servers.next().await.unwrap();
1206 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1207 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1208 version: Some(10000),
1209 diagnostics: Vec::new(),
1210 });
1211 cx.foreground().run_until_parked();
1212
1213 project.update(cx, |project, cx| {
1214 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1215 });
1216 let mut fake_server = fake_servers.next().await.unwrap();
1217 let notification = fake_server
1218 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1219 .await
1220 .text_document;
1221 assert_eq!(notification.version, 0);
1222}
1223
1224#[gpui::test]
1225async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1226 init_test(cx);
1227
1228 let mut rust = Language::new(
1229 LanguageConfig {
1230 name: Arc::from("Rust"),
1231 path_suffixes: vec!["rs".to_string()],
1232 ..Default::default()
1233 },
1234 None,
1235 );
1236 let mut fake_rust_servers = rust
1237 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1238 name: "rust-lsp",
1239 ..Default::default()
1240 }))
1241 .await;
1242 let mut js = Language::new(
1243 LanguageConfig {
1244 name: Arc::from("JavaScript"),
1245 path_suffixes: vec!["js".to_string()],
1246 ..Default::default()
1247 },
1248 None,
1249 );
1250 let mut fake_js_servers = js
1251 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1252 name: "js-lsp",
1253 ..Default::default()
1254 }))
1255 .await;
1256
1257 let fs = FakeFs::new(cx.background());
1258 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1259 .await;
1260
1261 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1262 project.update(cx, |project, _| {
1263 project.languages.add(Arc::new(rust));
1264 project.languages.add(Arc::new(js));
1265 });
1266
1267 let _rs_buffer = project
1268 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1269 .await
1270 .unwrap();
1271 let _js_buffer = project
1272 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1273 .await
1274 .unwrap();
1275
1276 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1277 assert_eq!(
1278 fake_rust_server_1
1279 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1280 .await
1281 .text_document
1282 .uri
1283 .as_str(),
1284 "file:///dir/a.rs"
1285 );
1286
1287 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1288 assert_eq!(
1289 fake_js_server
1290 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1291 .await
1292 .text_document
1293 .uri
1294 .as_str(),
1295 "file:///dir/b.js"
1296 );
1297
1298 // Disable Rust language server, ensuring only that server gets stopped.
1299 cx.update(|cx| {
1300 cx.update_global(|settings: &mut SettingsStore, cx| {
1301 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1302 settings.languages.insert(
1303 Arc::from("Rust"),
1304 LanguageSettingsContent {
1305 enable_language_server: Some(false),
1306 ..Default::default()
1307 },
1308 );
1309 });
1310 })
1311 });
1312 fake_rust_server_1
1313 .receive_notification::<lsp::notification::Exit>()
1314 .await;
1315
1316 // Enable Rust and disable JavaScript language servers, ensuring that the
1317 // former gets started again and that the latter stops.
1318 cx.update(|cx| {
1319 cx.update_global(|settings: &mut SettingsStore, cx| {
1320 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1321 settings.languages.insert(
1322 Arc::from("Rust"),
1323 LanguageSettingsContent {
1324 enable_language_server: Some(true),
1325 ..Default::default()
1326 },
1327 );
1328 settings.languages.insert(
1329 Arc::from("JavaScript"),
1330 LanguageSettingsContent {
1331 enable_language_server: Some(false),
1332 ..Default::default()
1333 },
1334 );
1335 });
1336 })
1337 });
1338 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1339 assert_eq!(
1340 fake_rust_server_2
1341 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1342 .await
1343 .text_document
1344 .uri
1345 .as_str(),
1346 "file:///dir/a.rs"
1347 );
1348 fake_js_server
1349 .receive_notification::<lsp::notification::Exit>()
1350 .await;
1351}
1352
1353#[gpui::test(iterations = 3)]
1354async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1355 init_test(cx);
1356
1357 let mut language = Language::new(
1358 LanguageConfig {
1359 name: "Rust".into(),
1360 path_suffixes: vec!["rs".to_string()],
1361 ..Default::default()
1362 },
1363 Some(tree_sitter_rust::language()),
1364 );
1365 let mut fake_servers = language
1366 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1367 disk_based_diagnostics_sources: vec!["disk".into()],
1368 ..Default::default()
1369 }))
1370 .await;
1371
1372 let text = "
1373 fn a() { A }
1374 fn b() { BB }
1375 fn c() { CCC }
1376 "
1377 .unindent();
1378
1379 let fs = FakeFs::new(cx.background());
1380 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1381
1382 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1383 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1384
1385 let buffer = project
1386 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1387 .await
1388 .unwrap();
1389
1390 let mut fake_server = fake_servers.next().await.unwrap();
1391 let open_notification = fake_server
1392 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1393 .await;
1394
1395 // Edit the buffer, moving the content down
1396 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1397 let change_notification_1 = fake_server
1398 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1399 .await;
1400 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1401
1402 // Report some diagnostics for the initial version of the buffer
1403 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1404 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1405 version: Some(open_notification.text_document.version),
1406 diagnostics: vec![
1407 lsp::Diagnostic {
1408 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1409 severity: Some(DiagnosticSeverity::ERROR),
1410 message: "undefined variable 'A'".to_string(),
1411 source: Some("disk".to_string()),
1412 ..Default::default()
1413 },
1414 lsp::Diagnostic {
1415 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1416 severity: Some(DiagnosticSeverity::ERROR),
1417 message: "undefined variable 'BB'".to_string(),
1418 source: Some("disk".to_string()),
1419 ..Default::default()
1420 },
1421 lsp::Diagnostic {
1422 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1423 severity: Some(DiagnosticSeverity::ERROR),
1424 source: Some("disk".to_string()),
1425 message: "undefined variable 'CCC'".to_string(),
1426 ..Default::default()
1427 },
1428 ],
1429 });
1430
1431 // The diagnostics have moved down since they were created.
1432 buffer.next_notification(cx).await;
1433 cx.foreground().run_until_parked();
1434 buffer.read_with(cx, |buffer, _| {
1435 assert_eq!(
1436 buffer
1437 .snapshot()
1438 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1439 .collect::<Vec<_>>(),
1440 &[
1441 DiagnosticEntry {
1442 range: Point::new(3, 9)..Point::new(3, 11),
1443 diagnostic: Diagnostic {
1444 source: Some("disk".into()),
1445 severity: DiagnosticSeverity::ERROR,
1446 message: "undefined variable 'BB'".to_string(),
1447 is_disk_based: true,
1448 group_id: 1,
1449 is_primary: true,
1450 ..Default::default()
1451 },
1452 },
1453 DiagnosticEntry {
1454 range: Point::new(4, 9)..Point::new(4, 12),
1455 diagnostic: Diagnostic {
1456 source: Some("disk".into()),
1457 severity: DiagnosticSeverity::ERROR,
1458 message: "undefined variable 'CCC'".to_string(),
1459 is_disk_based: true,
1460 group_id: 2,
1461 is_primary: true,
1462 ..Default::default()
1463 }
1464 }
1465 ]
1466 );
1467 assert_eq!(
1468 chunks_with_diagnostics(buffer, 0..buffer.len()),
1469 [
1470 ("\n\nfn a() { ".to_string(), None),
1471 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1472 (" }\nfn b() { ".to_string(), None),
1473 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1474 (" }\nfn c() { ".to_string(), None),
1475 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1476 (" }\n".to_string(), None),
1477 ]
1478 );
1479 assert_eq!(
1480 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1481 [
1482 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1483 (" }\nfn c() { ".to_string(), None),
1484 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1485 ]
1486 );
1487 });
1488
1489 // Ensure overlapping diagnostics are highlighted correctly.
1490 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1491 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1492 version: Some(open_notification.text_document.version),
1493 diagnostics: vec![
1494 lsp::Diagnostic {
1495 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1496 severity: Some(DiagnosticSeverity::ERROR),
1497 message: "undefined variable 'A'".to_string(),
1498 source: Some("disk".to_string()),
1499 ..Default::default()
1500 },
1501 lsp::Diagnostic {
1502 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1503 severity: Some(DiagnosticSeverity::WARNING),
1504 message: "unreachable statement".to_string(),
1505 source: Some("disk".to_string()),
1506 ..Default::default()
1507 },
1508 ],
1509 });
1510
1511 buffer.next_notification(cx).await;
1512 cx.foreground().run_until_parked();
1513 buffer.read_with(cx, |buffer, _| {
1514 assert_eq!(
1515 buffer
1516 .snapshot()
1517 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1518 .collect::<Vec<_>>(),
1519 &[
1520 DiagnosticEntry {
1521 range: Point::new(2, 9)..Point::new(2, 12),
1522 diagnostic: Diagnostic {
1523 source: Some("disk".into()),
1524 severity: DiagnosticSeverity::WARNING,
1525 message: "unreachable statement".to_string(),
1526 is_disk_based: true,
1527 group_id: 4,
1528 is_primary: true,
1529 ..Default::default()
1530 }
1531 },
1532 DiagnosticEntry {
1533 range: Point::new(2, 9)..Point::new(2, 10),
1534 diagnostic: Diagnostic {
1535 source: Some("disk".into()),
1536 severity: DiagnosticSeverity::ERROR,
1537 message: "undefined variable 'A'".to_string(),
1538 is_disk_based: true,
1539 group_id: 3,
1540 is_primary: true,
1541 ..Default::default()
1542 },
1543 }
1544 ]
1545 );
1546 assert_eq!(
1547 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1548 [
1549 ("fn a() { ".to_string(), None),
1550 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1551 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1552 ("\n".to_string(), None),
1553 ]
1554 );
1555 assert_eq!(
1556 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1557 [
1558 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1559 ("\n".to_string(), None),
1560 ]
1561 );
1562 });
1563
1564 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1565 // changes since the last save.
1566 buffer.update(cx, |buffer, cx| {
1567 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1568 buffer.edit(
1569 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1570 None,
1571 cx,
1572 );
1573 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1574 });
1575 let change_notification_2 = fake_server
1576 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1577 .await;
1578 assert!(
1579 change_notification_2.text_document.version > change_notification_1.text_document.version
1580 );
1581
1582 // Handle out-of-order diagnostics
1583 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1584 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1585 version: Some(change_notification_2.text_document.version),
1586 diagnostics: vec![
1587 lsp::Diagnostic {
1588 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1589 severity: Some(DiagnosticSeverity::ERROR),
1590 message: "undefined variable 'BB'".to_string(),
1591 source: Some("disk".to_string()),
1592 ..Default::default()
1593 },
1594 lsp::Diagnostic {
1595 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1596 severity: Some(DiagnosticSeverity::WARNING),
1597 message: "undefined variable 'A'".to_string(),
1598 source: Some("disk".to_string()),
1599 ..Default::default()
1600 },
1601 ],
1602 });
1603
1604 buffer.next_notification(cx).await;
1605 cx.foreground().run_until_parked();
1606 buffer.read_with(cx, |buffer, _| {
1607 assert_eq!(
1608 buffer
1609 .snapshot()
1610 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1611 .collect::<Vec<_>>(),
1612 &[
1613 DiagnosticEntry {
1614 range: Point::new(2, 21)..Point::new(2, 22),
1615 diagnostic: Diagnostic {
1616 source: Some("disk".into()),
1617 severity: DiagnosticSeverity::WARNING,
1618 message: "undefined variable 'A'".to_string(),
1619 is_disk_based: true,
1620 group_id: 6,
1621 is_primary: true,
1622 ..Default::default()
1623 }
1624 },
1625 DiagnosticEntry {
1626 range: Point::new(3, 9)..Point::new(3, 14),
1627 diagnostic: Diagnostic {
1628 source: Some("disk".into()),
1629 severity: DiagnosticSeverity::ERROR,
1630 message: "undefined variable 'BB'".to_string(),
1631 is_disk_based: true,
1632 group_id: 5,
1633 is_primary: true,
1634 ..Default::default()
1635 },
1636 }
1637 ]
1638 );
1639 });
1640}
1641
1642#[gpui::test]
1643async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1644 init_test(cx);
1645
1646 let text = concat!(
1647 "let one = ;\n", //
1648 "let two = \n",
1649 "let three = 3;\n",
1650 );
1651
1652 let fs = FakeFs::new(cx.background());
1653 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1654
1655 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1656 let buffer = project
1657 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1658 .await
1659 .unwrap();
1660
1661 project.update(cx, |project, cx| {
1662 project
1663 .update_buffer_diagnostics(
1664 &buffer,
1665 LanguageServerId(0),
1666 None,
1667 vec![
1668 DiagnosticEntry {
1669 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1670 diagnostic: Diagnostic {
1671 severity: DiagnosticSeverity::ERROR,
1672 message: "syntax error 1".to_string(),
1673 ..Default::default()
1674 },
1675 },
1676 DiagnosticEntry {
1677 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1678 diagnostic: Diagnostic {
1679 severity: DiagnosticSeverity::ERROR,
1680 message: "syntax error 2".to_string(),
1681 ..Default::default()
1682 },
1683 },
1684 ],
1685 cx,
1686 )
1687 .unwrap();
1688 });
1689
1690 // An empty range is extended forward to include the following character.
1691 // At the end of a line, an empty range is extended backward to include
1692 // the preceding character.
1693 buffer.read_with(cx, |buffer, _| {
1694 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1695 assert_eq!(
1696 chunks
1697 .iter()
1698 .map(|(s, d)| (s.as_str(), *d))
1699 .collect::<Vec<_>>(),
1700 &[
1701 ("let one = ", None),
1702 (";", Some(DiagnosticSeverity::ERROR)),
1703 ("\nlet two =", None),
1704 (" ", Some(DiagnosticSeverity::ERROR)),
1705 ("\nlet three = 3;\n", None)
1706 ]
1707 );
1708 });
1709}
1710
1711#[gpui::test]
1712async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1713 init_test(cx);
1714
1715 let fs = FakeFs::new(cx.background());
1716 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1717 .await;
1718
1719 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1720
1721 project.update(cx, |project, cx| {
1722 project
1723 .update_diagnostic_entries(
1724 LanguageServerId(0),
1725 Path::new("/dir/a.rs").to_owned(),
1726 None,
1727 vec![DiagnosticEntry {
1728 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1729 diagnostic: Diagnostic {
1730 severity: DiagnosticSeverity::ERROR,
1731 is_primary: true,
1732 message: "syntax error a1".to_string(),
1733 ..Default::default()
1734 },
1735 }],
1736 cx,
1737 )
1738 .unwrap();
1739 project
1740 .update_diagnostic_entries(
1741 LanguageServerId(1),
1742 Path::new("/dir/a.rs").to_owned(),
1743 None,
1744 vec![DiagnosticEntry {
1745 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1746 diagnostic: Diagnostic {
1747 severity: DiagnosticSeverity::ERROR,
1748 is_primary: true,
1749 message: "syntax error b1".to_string(),
1750 ..Default::default()
1751 },
1752 }],
1753 cx,
1754 )
1755 .unwrap();
1756
1757 assert_eq!(
1758 project.diagnostic_summary(cx),
1759 DiagnosticSummary {
1760 error_count: 2,
1761 warning_count: 0,
1762 }
1763 );
1764 });
1765}
1766
1767#[gpui::test]
1768async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1769 init_test(cx);
1770
1771 let mut language = Language::new(
1772 LanguageConfig {
1773 name: "Rust".into(),
1774 path_suffixes: vec!["rs".to_string()],
1775 ..Default::default()
1776 },
1777 Some(tree_sitter_rust::language()),
1778 );
1779 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1780
1781 let text = "
1782 fn a() {
1783 f1();
1784 }
1785 fn b() {
1786 f2();
1787 }
1788 fn c() {
1789 f3();
1790 }
1791 "
1792 .unindent();
1793
1794 let fs = FakeFs::new(cx.background());
1795 fs.insert_tree(
1796 "/dir",
1797 json!({
1798 "a.rs": text.clone(),
1799 }),
1800 )
1801 .await;
1802
1803 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1804 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1805 let buffer = project
1806 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1807 .await
1808 .unwrap();
1809
1810 let mut fake_server = fake_servers.next().await.unwrap();
1811 let lsp_document_version = fake_server
1812 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1813 .await
1814 .text_document
1815 .version;
1816
1817 // Simulate editing the buffer after the language server computes some edits.
1818 buffer.update(cx, |buffer, cx| {
1819 buffer.edit(
1820 [(
1821 Point::new(0, 0)..Point::new(0, 0),
1822 "// above first function\n",
1823 )],
1824 None,
1825 cx,
1826 );
1827 buffer.edit(
1828 [(
1829 Point::new(2, 0)..Point::new(2, 0),
1830 " // inside first function\n",
1831 )],
1832 None,
1833 cx,
1834 );
1835 buffer.edit(
1836 [(
1837 Point::new(6, 4)..Point::new(6, 4),
1838 "// inside second function ",
1839 )],
1840 None,
1841 cx,
1842 );
1843
1844 assert_eq!(
1845 buffer.text(),
1846 "
1847 // above first function
1848 fn a() {
1849 // inside first function
1850 f1();
1851 }
1852 fn b() {
1853 // inside second function f2();
1854 }
1855 fn c() {
1856 f3();
1857 }
1858 "
1859 .unindent()
1860 );
1861 });
1862
1863 let edits = project
1864 .update(cx, |project, cx| {
1865 project.edits_from_lsp(
1866 &buffer,
1867 vec![
1868 // replace body of first function
1869 lsp::TextEdit {
1870 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1871 new_text: "
1872 fn a() {
1873 f10();
1874 }
1875 "
1876 .unindent(),
1877 },
1878 // edit inside second function
1879 lsp::TextEdit {
1880 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1881 new_text: "00".into(),
1882 },
1883 // edit inside third function via two distinct edits
1884 lsp::TextEdit {
1885 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1886 new_text: "4000".into(),
1887 },
1888 lsp::TextEdit {
1889 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1890 new_text: "".into(),
1891 },
1892 ],
1893 LanguageServerId(0),
1894 Some(lsp_document_version),
1895 cx,
1896 )
1897 })
1898 .await
1899 .unwrap();
1900
1901 buffer.update(cx, |buffer, cx| {
1902 for (range, new_text) in edits {
1903 buffer.edit([(range, new_text)], None, cx);
1904 }
1905 assert_eq!(
1906 buffer.text(),
1907 "
1908 // above first function
1909 fn a() {
1910 // inside first function
1911 f10();
1912 }
1913 fn b() {
1914 // inside second function f200();
1915 }
1916 fn c() {
1917 f4000();
1918 }
1919 "
1920 .unindent()
1921 );
1922 });
1923}
1924
1925#[gpui::test]
1926async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1927 init_test(cx);
1928
1929 let text = "
1930 use a::b;
1931 use a::c;
1932
1933 fn f() {
1934 b();
1935 c();
1936 }
1937 "
1938 .unindent();
1939
1940 let fs = FakeFs::new(cx.background());
1941 fs.insert_tree(
1942 "/dir",
1943 json!({
1944 "a.rs": text.clone(),
1945 }),
1946 )
1947 .await;
1948
1949 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1950 let buffer = project
1951 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1952 .await
1953 .unwrap();
1954
1955 // Simulate the language server sending us a small edit in the form of a very large diff.
1956 // Rust-analyzer does this when performing a merge-imports code action.
1957 let edits = project
1958 .update(cx, |project, cx| {
1959 project.edits_from_lsp(
1960 &buffer,
1961 [
1962 // Replace the first use statement without editing the semicolon.
1963 lsp::TextEdit {
1964 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1965 new_text: "a::{b, c}".into(),
1966 },
1967 // Reinsert the remainder of the file between the semicolon and the final
1968 // newline of the file.
1969 lsp::TextEdit {
1970 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1971 new_text: "\n\n".into(),
1972 },
1973 lsp::TextEdit {
1974 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1975 new_text: "
1976 fn f() {
1977 b();
1978 c();
1979 }"
1980 .unindent(),
1981 },
1982 // Delete everything after the first newline of the file.
1983 lsp::TextEdit {
1984 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1985 new_text: "".into(),
1986 },
1987 ],
1988 LanguageServerId(0),
1989 None,
1990 cx,
1991 )
1992 })
1993 .await
1994 .unwrap();
1995
1996 buffer.update(cx, |buffer, cx| {
1997 let edits = edits
1998 .into_iter()
1999 .map(|(range, text)| {
2000 (
2001 range.start.to_point(buffer)..range.end.to_point(buffer),
2002 text,
2003 )
2004 })
2005 .collect::<Vec<_>>();
2006
2007 assert_eq!(
2008 edits,
2009 [
2010 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2011 (Point::new(1, 0)..Point::new(2, 0), "".into())
2012 ]
2013 );
2014
2015 for (range, new_text) in edits {
2016 buffer.edit([(range, new_text)], None, cx);
2017 }
2018 assert_eq!(
2019 buffer.text(),
2020 "
2021 use a::{b, c};
2022
2023 fn f() {
2024 b();
2025 c();
2026 }
2027 "
2028 .unindent()
2029 );
2030 });
2031}
2032
2033#[gpui::test]
2034async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
2035 init_test(cx);
2036
2037 let text = "
2038 use a::b;
2039 use a::c;
2040
2041 fn f() {
2042 b();
2043 c();
2044 }
2045 "
2046 .unindent();
2047
2048 let fs = FakeFs::new(cx.background());
2049 fs.insert_tree(
2050 "/dir",
2051 json!({
2052 "a.rs": text.clone(),
2053 }),
2054 )
2055 .await;
2056
2057 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2058 let buffer = project
2059 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2060 .await
2061 .unwrap();
2062
2063 // Simulate the language server sending us edits in a non-ordered fashion,
2064 // with ranges sometimes being inverted or pointing to invalid locations.
2065 let edits = project
2066 .update(cx, |project, cx| {
2067 project.edits_from_lsp(
2068 &buffer,
2069 [
2070 lsp::TextEdit {
2071 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2072 new_text: "\n\n".into(),
2073 },
2074 lsp::TextEdit {
2075 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2076 new_text: "a::{b, c}".into(),
2077 },
2078 lsp::TextEdit {
2079 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2080 new_text: "".into(),
2081 },
2082 lsp::TextEdit {
2083 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2084 new_text: "
2085 fn f() {
2086 b();
2087 c();
2088 }"
2089 .unindent(),
2090 },
2091 ],
2092 LanguageServerId(0),
2093 None,
2094 cx,
2095 )
2096 })
2097 .await
2098 .unwrap();
2099
2100 buffer.update(cx, |buffer, cx| {
2101 let edits = edits
2102 .into_iter()
2103 .map(|(range, text)| {
2104 (
2105 range.start.to_point(buffer)..range.end.to_point(buffer),
2106 text,
2107 )
2108 })
2109 .collect::<Vec<_>>();
2110
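        // Despite the out-of-order, inverted, and out-of-bounds ranges, the edits are
        // sorted, clipped to the buffer, and minimized into two buffer edits.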
2111 assert_eq!(
2112 edits,
2113 [
2114 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2115 (Point::new(1, 0)..Point::new(2, 0), "".into())
2116 ]
2117 );
2118
2119 for (range, new_text) in edits {
2120 buffer.edit([(range, new_text)], None, cx);
2121 }
2122 assert_eq!(
2123 buffer.text(),
2124 "
2125 use a::{b, c};
2126
2127 fn f() {
2128 b();
2129 c();
2130 }
2131 "
2132 .unindent()
2133 );
2134 });
2135}
2136
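// Collects the chunks of `buffer` within `range`, merging adjacent chunks that share
// the same diagnostic severity.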
2137fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2138 buffer: &Buffer,
2139 range: Range<T>,
2140) -> Vec<(String, Option<DiagnosticSeverity>)> {
2141 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2142 for chunk in buffer.snapshot().chunks(range, true) {
2143 if chunks.last().map_or(false, |prev_chunk| {
2144 prev_chunk.1 == chunk.diagnostic_severity
2145 }) {
2146 chunks.last_mut().unwrap().0.push_str(chunk.text);
2147 } else {
2148 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2149 }
2150 }
2151 chunks
2152}
2153
2154#[gpui::test(iterations = 10)]
2155async fn test_definition(cx: &mut gpui::TestAppContext) {
2156 init_test(cx);
2157
2158 let mut language = Language::new(
2159 LanguageConfig {
2160 name: "Rust".into(),
2161 path_suffixes: vec!["rs".to_string()],
2162 ..Default::default()
2163 },
2164 Some(tree_sitter_rust::language()),
2165 );
2166 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2167
2168 let fs = FakeFs::new(cx.background());
2169 fs.insert_tree(
2170 "/dir",
2171 json!({
2172 "a.rs": "const fn a() { A }",
2173 "b.rs": "const y: i32 = crate::a()",
2174 }),
2175 )
2176 .await;
2177
2178 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2179 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2180
2181 let buffer = project
2182 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2183 .await
2184 .unwrap();
2185
2186 let fake_server = fake_servers.next().await.unwrap();
2187 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2188 let params = params.text_document_position_params;
2189 assert_eq!(
2190 params.text_document.uri.to_file_path().unwrap(),
2191 Path::new("/dir/b.rs"),
2192 );
2193 assert_eq!(params.position, lsp::Position::new(0, 22));
2194
2195 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2196 lsp::Location::new(
2197 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2198 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2199 ),
2200 )))
2201 });
2202
2203 let mut definitions = project
2204 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2205 .await
2206 .unwrap();
2207
    // Assert that no additional language server was started.
2209 cx.foreground().run_until_parked();
2210 assert!(fake_servers.try_next().is_err());
2211
2212 assert_eq!(definitions.len(), 1);
2213 let definition = definitions.pop().unwrap();
2214 cx.update(|cx| {
2215 let target_buffer = definition.target.buffer.read(cx);
2216 assert_eq!(
2217 target_buffer
2218 .file()
2219 .unwrap()
2220 .as_local()
2221 .unwrap()
2222 .abs_path(cx),
2223 Path::new("/dir/a.rs"),
2224 );
2225 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2226 assert_eq!(
2227 list_worktrees(&project, cx),
2228 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2229 );
2230
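        // Dropping the only handle to the definition's target buffer should release
        // the invisible worktree that was created to host it.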
2231 drop(definition);
2232 });
2233 cx.read(|cx| {
2234 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2235 });
2236
2237 fn list_worktrees<'a>(
2238 project: &'a ModelHandle<Project>,
2239 cx: &'a AppContext,
2240 ) -> Vec<(&'a Path, bool)> {
2241 project
2242 .read(cx)
2243 .worktrees(cx)
2244 .map(|worktree| {
2245 let worktree = worktree.read(cx);
2246 (
2247 worktree.as_local().unwrap().abs_path().as_ref(),
2248 worktree.is_visible(),
2249 )
2250 })
2251 .collect::<Vec<_>>()
2252 }
2253}
2254
2255#[gpui::test]
2256async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2257 init_test(cx);
2258
2259 let mut language = Language::new(
2260 LanguageConfig {
2261 name: "TypeScript".into(),
2262 path_suffixes: vec!["ts".to_string()],
2263 ..Default::default()
2264 },
2265 Some(tree_sitter_typescript::language_typescript()),
2266 );
2267 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2268
2269 let fs = FakeFs::new(cx.background());
2270 fs.insert_tree(
2271 "/dir",
2272 json!({
2273 "a.ts": "",
2274 }),
2275 )
2276 .await;
2277
2278 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2279 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2280 let buffer = project
2281 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2282 .await
2283 .unwrap();
2284
2285 let fake_server = fake_language_servers.next().await.unwrap();
2286
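    // Completion items that don't specify an edit range should replace the partial
    // word preceding the cursor ("fqn").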
2287 let text = "let a = b.fqn";
2288 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2289 let completions = project.update(cx, |project, cx| {
2290 project.completions(&buffer, text.len(), cx)
2291 });
2292
2293 fake_server
2294 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2295 Ok(Some(lsp::CompletionResponse::Array(vec![
2296 lsp::CompletionItem {
2297 label: "fullyQualifiedName?".into(),
2298 insert_text: Some("fullyQualifiedName".into()),
2299 ..Default::default()
2300 },
2301 ])))
2302 })
2303 .next()
2304 .await;
2305 let completions = completions.await.unwrap();
2306 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2307 assert_eq!(completions.len(), 1);
2308 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2309 assert_eq!(
2310 completions[0].old_range.to_offset(&snapshot),
2311 text.len() - 3..text.len()
2312 );
2313
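    // When completing inside a string literal, the replaced range is still just the
    // word before the cursor ("cmp"), not the entire string.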
2314 let text = "let a = \"atoms/cmp\"";
2315 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2316 let completions = project.update(cx, |project, cx| {
2317 project.completions(&buffer, text.len() - 1, cx)
2318 });
2319
2320 fake_server
2321 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2322 Ok(Some(lsp::CompletionResponse::Array(vec![
2323 lsp::CompletionItem {
2324 label: "component".into(),
2325 ..Default::default()
2326 },
2327 ])))
2328 })
2329 .next()
2330 .await;
2331 let completions = completions.await.unwrap();
2332 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2333 assert_eq!(completions.len(), 1);
2334 assert_eq!(completions[0].new_text, "component");
2335 assert_eq!(
2336 completions[0].old_range.to_offset(&snapshot),
2337 text.len() - 4..text.len() - 1
2338 );
2339}
2340
2341#[gpui::test]
2342async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2343 init_test(cx);
2344
2345 let mut language = Language::new(
2346 LanguageConfig {
2347 name: "TypeScript".into(),
2348 path_suffixes: vec!["ts".to_string()],
2349 ..Default::default()
2350 },
2351 Some(tree_sitter_typescript::language_typescript()),
2352 );
2353 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2354
2355 let fs = FakeFs::new(cx.background());
2356 fs.insert_tree(
2357 "/dir",
2358 json!({
2359 "a.ts": "",
2360 }),
2361 )
2362 .await;
2363
2364 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2365 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2366 let buffer = project
2367 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2368 .await
2369 .unwrap();
2370
2371 let fake_server = fake_language_servers.next().await.unwrap();
2372
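    // The server's completion text contains carriage returns, which should be
    // normalized to the buffer's '\n' line endings.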
2373 let text = "let a = b.fqn";
2374 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2375 let completions = project.update(cx, |project, cx| {
2376 project.completions(&buffer, text.len(), cx)
2377 });
2378
2379 fake_server
2380 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2381 Ok(Some(lsp::CompletionResponse::Array(vec![
2382 lsp::CompletionItem {
2383 label: "fullyQualifiedName?".into(),
2384 insert_text: Some("fully\rQualified\r\nName".into()),
2385 ..Default::default()
2386 },
2387 ])))
2388 })
2389 .next()
2390 .await;
2391 let completions = completions.await.unwrap();
2392 assert_eq!(completions.len(), 1);
2393 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2394}
2395
2396#[gpui::test(iterations = 10)]
2397async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2398 init_test(cx);
2399
2400 let mut language = Language::new(
2401 LanguageConfig {
2402 name: "TypeScript".into(),
2403 path_suffixes: vec!["ts".to_string()],
2404 ..Default::default()
2405 },
2406 None,
2407 );
2408 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2409
2410 let fs = FakeFs::new(cx.background());
2411 fs.insert_tree(
2412 "/dir",
2413 json!({
2414 "a.ts": "a",
2415 }),
2416 )
2417 .await;
2418
2419 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2420 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2421 let buffer = project
2422 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2423 .await
2424 .unwrap();
2425
2426 let fake_server = fake_language_servers.next().await.unwrap();
2427
    // The language server returns code actions that contain commands, not edits.
2429 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2430 fake_server
2431 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2432 Ok(Some(vec![
2433 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2434 title: "The code action".into(),
2435 command: Some(lsp::Command {
2436 title: "The command".into(),
2437 command: "_the/command".into(),
2438 arguments: Some(vec![json!("the-argument")]),
2439 }),
2440 ..Default::default()
2441 }),
2442 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2443 title: "two".into(),
2444 ..Default::default()
2445 }),
2446 ]))
2447 })
2448 .next()
2449 .await;
2450
2451 let action = actions.await.unwrap()[0].clone();
2452 let apply = project.update(cx, |project, cx| {
2453 project.apply_code_action(buffer.clone(), action, true, cx)
2454 });
2455
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the code action's command.
2458 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2459 |action, _| async move { Ok(action) },
2460 );
2461
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2464 fake_server
2465 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2466 let fake = fake_server.clone();
2467 move |params, _| {
2468 assert_eq!(params.command, "_the/command");
2469 let fake = fake.clone();
2470 async move {
2471 fake.server
2472 .request::<lsp::request::ApplyWorkspaceEdit>(
2473 lsp::ApplyWorkspaceEditParams {
2474 label: None,
2475 edit: lsp::WorkspaceEdit {
2476 changes: Some(
2477 [(
2478 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2479 vec![lsp::TextEdit {
2480 range: lsp::Range::new(
2481 lsp::Position::new(0, 0),
2482 lsp::Position::new(0, 0),
2483 ),
2484 new_text: "X".into(),
2485 }],
2486 )]
2487 .into_iter()
2488 .collect(),
2489 ),
2490 ..Default::default()
2491 },
2492 },
2493 )
2494 .await
2495 .unwrap();
2496 Ok(Some(json!(null)))
2497 }
2498 }
2499 })
2500 .next()
2501 .await;
2502
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2505 let transaction = apply.await.unwrap();
2506 assert!(transaction.0.contains_key(&buffer));
2507 buffer.update(cx, |buffer, cx| {
2508 assert_eq!(buffer.text(), "Xa");
2509 buffer.undo(cx);
2510 assert_eq!(buffer.text(), "a");
2511 });
2512}
2513
2514#[gpui::test(iterations = 10)]
2515async fn test_save_file(cx: &mut gpui::TestAppContext) {
2516 init_test(cx);
2517
2518 let fs = FakeFs::new(cx.background());
2519 fs.insert_tree(
2520 "/dir",
2521 json!({
2522 "file1": "the old contents",
2523 }),
2524 )
2525 .await;
2526
2527 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2528 let buffer = project
2529 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2530 .await
2531 .unwrap();
2532 buffer.update(cx, |buffer, cx| {
2533 assert_eq!(buffer.text(), "the old contents");
2534 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2535 });
2536
2537 project
2538 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2539 .await
2540 .unwrap();
2541
2542 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2543 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2544}
2545
2546#[gpui::test]
2547async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2548 init_test(cx);
2549
2550 let fs = FakeFs::new(cx.background());
2551 fs.insert_tree(
2552 "/dir",
2553 json!({
2554 "file1": "the old contents",
2555 }),
2556 )
2557 .await;
2558
2559 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2560 let buffer = project
2561 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2562 .await
2563 .unwrap();
2564 buffer.update(cx, |buffer, cx| {
2565 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2566 });
2567
2568 project
2569 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2570 .await
2571 .unwrap();
2572
2573 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2574 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2575}
2576
2577#[gpui::test]
2578async fn test_save_as(cx: &mut gpui::TestAppContext) {
2579 init_test(cx);
2580
2581 let fs = FakeFs::new(cx.background());
2582 fs.insert_tree("/dir", json!({})).await;
2583
2584 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2585
2586 let languages = project.read_with(cx, |project, _| project.languages().clone());
2587 languages.register(
2588 "/some/path",
2589 LanguageConfig {
2590 name: "Rust".into(),
2591 path_suffixes: vec!["rs".into()],
2592 ..Default::default()
2593 },
2594 tree_sitter_rust::language(),
2595 vec![],
2596 |_| Default::default(),
2597 );
2598
2599 let buffer = project.update(cx, |project, cx| {
2600 project.create_buffer("", None, cx).unwrap()
2601 });
2602 buffer.update(cx, |buffer, cx| {
2603 buffer.edit([(0..0, "abc")], None, cx);
2604 assert!(buffer.is_dirty());
2605 assert!(!buffer.has_conflict());
2606 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2607 });
2608 project
2609 .update(cx, |project, cx| {
2610 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2611 })
2612 .await
2613 .unwrap();
2614 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2615
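    // After saving, the buffer is associated with its new path, is no longer dirty,
    // and re-detects its language from the file extension.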
2616 cx.foreground().run_until_parked();
2617 buffer.read_with(cx, |buffer, cx| {
2618 assert_eq!(
2619 buffer.file().unwrap().full_path(cx),
2620 Path::new("dir/file1.rs")
2621 );
2622 assert!(!buffer.is_dirty());
2623 assert!(!buffer.has_conflict());
2624 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2625 });
2626
2627 let opened_buffer = project
2628 .update(cx, |project, cx| {
2629 project.open_local_buffer("/dir/file1.rs", cx)
2630 })
2631 .await
2632 .unwrap();
2633 assert_eq!(opened_buffer, buffer);
2634}
2635
2636#[gpui::test(retries = 5)]
2637async fn test_rescan_and_remote_updates(
2638 deterministic: Arc<Deterministic>,
2639 cx: &mut gpui::TestAppContext,
2640) {
2641 init_test(cx);
2642 cx.foreground().allow_parking();
2643
2644 let dir = temp_tree(json!({
2645 "a": {
2646 "file1": "",
2647 "file2": "",
2648 "file3": "",
2649 },
2650 "b": {
2651 "c": {
2652 "file4": "",
2653 "file5": "",
2654 }
2655 }
2656 }));
2657
2658 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2659 let rpc = project.read_with(cx, |p, _| p.client.clone());
2660
2661 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2662 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2663 async move { buffer.await.unwrap() }
2664 };
2665 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2666 project.read_with(cx, |project, cx| {
2667 let tree = project.worktrees(cx).next().unwrap();
2668 tree.read(cx)
2669 .entry_for_path(path)
2670 .unwrap_or_else(|| panic!("no entry for path {}", path))
2671 .id
2672 })
2673 };
2674
2675 let buffer2 = buffer_for_path("a/file2", cx).await;
2676 let buffer3 = buffer_for_path("a/file3", cx).await;
2677 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2678 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2679
2680 let file2_id = id_for_path("a/file2", cx);
2681 let file3_id = id_for_path("a/file3", cx);
2682 let file4_id = id_for_path("b/c/file4", cx);
2683
2684 // Create a remote copy of this worktree.
2685 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2686
2687 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2688
2689 let updates = Arc::new(Mutex::new(Vec::new()));
2690 tree.update(cx, |tree, cx| {
2691 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2692 let updates = updates.clone();
2693 move |update| {
2694 updates.lock().push(update);
2695 async { true }
2696 }
2697 });
2698 });
2699
2700 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2701 deterministic.run_until_parked();
2702
2703 cx.read(|cx| {
2704 assert!(!buffer2.read(cx).is_dirty());
2705 assert!(!buffer3.read(cx).is_dirty());
2706 assert!(!buffer4.read(cx).is_dirty());
2707 assert!(!buffer5.read(cx).is_dirty());
2708 });
2709
2710 // Rename and delete files and directories.
2711 tree.flush_fs_events(cx).await;
2712 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2713 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2714 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2715 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2716 tree.flush_fs_events(cx).await;
2717
2718 let expected_paths = vec![
2719 "a",
2720 "a/file1",
2721 "a/file2.new",
2722 "b",
2723 "d",
2724 "d/file3",
2725 "d/file4",
2726 ];
2727
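    // After the rescan, entries keep their ids across renames and open buffers follow
    // their files to the new paths; the deleted file's buffer is marked as deleted.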
2728 cx.read(|app| {
2729 assert_eq!(
2730 tree.read(app)
2731 .paths()
2732 .map(|p| p.to_str().unwrap())
2733 .collect::<Vec<_>>(),
2734 expected_paths
2735 );
2736
2737 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2738 assert_eq!(id_for_path("d/file3", cx), file3_id);
2739 assert_eq!(id_for_path("d/file4", cx), file4_id);
2740
2741 assert_eq!(
2742 buffer2.read(app).file().unwrap().path().as_ref(),
2743 Path::new("a/file2.new")
2744 );
2745 assert_eq!(
2746 buffer3.read(app).file().unwrap().path().as_ref(),
2747 Path::new("d/file3")
2748 );
2749 assert_eq!(
2750 buffer4.read(app).file().unwrap().path().as_ref(),
2751 Path::new("d/file4")
2752 );
2753 assert_eq!(
2754 buffer5.read(app).file().unwrap().path().as_ref(),
2755 Path::new("b/c/file5")
2756 );
2757
2758 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2759 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2760 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2761 assert!(buffer5.read(app).file().unwrap().is_deleted());
2762 });
2763
2764 // Update the remote worktree. Check that it becomes consistent with the
2765 // local worktree.
2766 deterministic.run_until_parked();
2767 remote.update(cx, |remote, _| {
2768 for update in updates.lock().drain(..) {
2769 remote.as_remote_mut().unwrap().update_from_remote(update);
2770 }
2771 });
2772 deterministic.run_until_parked();
2773 remote.read_with(cx, |remote, _| {
2774 assert_eq!(
2775 remote
2776 .paths()
2777 .map(|p| p.to_str().unwrap())
2778 .collect::<Vec<_>>(),
2779 expected_paths
2780 );
2781 });
2782}
2783
2784#[gpui::test(iterations = 10)]
2785async fn test_buffer_identity_across_renames(
2786 deterministic: Arc<Deterministic>,
2787 cx: &mut gpui::TestAppContext,
2788) {
2789 init_test(cx);
2790
2791 let fs = FakeFs::new(cx.background());
2792 fs.insert_tree(
2793 "/dir",
2794 json!({
2795 "a": {
2796 "file1": "",
2797 }
2798 }),
2799 )
2800 .await;
2801
2802 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2803 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2804 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2805
2806 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2807 project.read_with(cx, |project, cx| {
2808 let tree = project.worktrees(cx).next().unwrap();
2809 tree.read(cx)
2810 .entry_for_path(path)
2811 .unwrap_or_else(|| panic!("no entry for path {}", path))
2812 .id
2813 })
2814 };
2815
2816 let dir_id = id_for_path("a", cx);
2817 let file_id = id_for_path("a/file1", cx);
2818 let buffer = project
2819 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2820 .await
2821 .unwrap();
2822 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2823
2824 project
2825 .update(cx, |project, cx| {
2826 project.rename_entry(dir_id, Path::new("b"), cx)
2827 })
2828 .unwrap()
2829 .await
2830 .unwrap();
2831 deterministic.run_until_parked();
2832 assert_eq!(id_for_path("b", cx), dir_id);
2833 assert_eq!(id_for_path("b/file1", cx), file_id);
2834 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2835}
2836
2837#[gpui::test]
2838async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2839 init_test(cx);
2840
2841 let fs = FakeFs::new(cx.background());
2842 fs.insert_tree(
2843 "/dir",
2844 json!({
2845 "a.txt": "a-contents",
2846 "b.txt": "b-contents",
2847 }),
2848 )
2849 .await;
2850
2851 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2852
2853 // Spawn multiple tasks to open paths, repeating some paths.
2854 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2855 (
2856 p.open_local_buffer("/dir/a.txt", cx),
2857 p.open_local_buffer("/dir/b.txt", cx),
2858 p.open_local_buffer("/dir/a.txt", cx),
2859 )
2860 });
2861
2862 let buffer_a_1 = buffer_a_1.await.unwrap();
2863 let buffer_a_2 = buffer_a_2.await.unwrap();
2864 let buffer_b = buffer_b.await.unwrap();
2865 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2866 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2867
2868 // There is only one buffer per path.
2869 let buffer_a_id = buffer_a_1.id();
2870 assert_eq!(buffer_a_2.id(), buffer_a_id);
2871
2872 // Open the same path again while it is still open.
2873 drop(buffer_a_1);
2874 let buffer_a_3 = project
2875 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2876 .await
2877 .unwrap();
2878
2879 // There's still only one buffer per path.
2880 assert_eq!(buffer_a_3.id(), buffer_a_id);
2881}
2882
2883#[gpui::test]
2884async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2885 init_test(cx);
2886
2887 let fs = FakeFs::new(cx.background());
2888 fs.insert_tree(
2889 "/dir",
2890 json!({
2891 "file1": "abc",
2892 "file2": "def",
2893 "file3": "ghi",
2894 }),
2895 )
2896 .await;
2897
2898 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2899
2900 let buffer1 = project
2901 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2902 .await
2903 .unwrap();
2904 let events = Rc::new(RefCell::new(Vec::new()));
2905
    // Initially, the buffer isn't dirty.
2907 buffer1.update(cx, |buffer, cx| {
2908 cx.subscribe(&buffer1, {
2909 let events = events.clone();
2910 move |_, _, event, _| match event {
2911 BufferEvent::Operation(_) => {}
2912 _ => events.borrow_mut().push(event.clone()),
2913 }
2914 })
2915 .detach();
2916
2917 assert!(!buffer.is_dirty());
2918 assert!(events.borrow().is_empty());
2919
2920 buffer.edit([(1..2, "")], None, cx);
2921 });
2922
    // After the first edit, the buffer is dirty and emits a `DirtyChanged` event.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2926 assert!(buffer.is_dirty());
2927 assert_eq!(
2928 *events.borrow(),
2929 &[language::Event::Edited, language::Event::DirtyChanged]
2930 );
2931 events.borrow_mut().clear();
2932 buffer.did_save(
2933 buffer.version(),
2934 buffer.as_rope().fingerprint(),
2935 buffer.file().unwrap().mtime(),
2936 cx,
2937 );
2938 });
2939
    // After saving, the buffer is no longer dirty and emits a `Saved` event.
2941 buffer1.update(cx, |buffer, cx| {
2942 assert!(!buffer.is_dirty());
2943 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2944 events.borrow_mut().clear();
2945
2946 buffer.edit([(1..1, "B")], None, cx);
2947 buffer.edit([(2..2, "D")], None, cx);
2948 });
2949
    // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2953 assert!(buffer.is_dirty());
2954 assert_eq!(
2955 *events.borrow(),
2956 &[
2957 language::Event::Edited,
2958 language::Event::DirtyChanged,
2959 language::Event::Edited,
2960 ],
2961 );
2962 events.borrow_mut().clear();
2963
2964 // After restoring the buffer to its previously-saved state,
2965 // the buffer is not considered dirty anymore.
2966 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
2968 assert!(!buffer.is_dirty());
2969 });
2970
2971 assert_eq!(
2972 *events.borrow(),
2973 &[language::Event::Edited, language::Event::DirtyChanged]
2974 );
2975
    // When a file is deleted, its buffer is considered dirty and emits
    // `DirtyChanged` and `FileHandleChanged` events.
2977 let events = Rc::new(RefCell::new(Vec::new()));
2978 let buffer2 = project
2979 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2980 .await
2981 .unwrap();
2982 buffer2.update(cx, |_, cx| {
2983 cx.subscribe(&buffer2, {
2984 let events = events.clone();
2985 move |_, _, event, _| events.borrow_mut().push(event.clone())
2986 })
2987 .detach();
2988 });
2989
2990 fs.remove_file("/dir/file2".as_ref(), Default::default())
2991 .await
2992 .unwrap();
2993 cx.foreground().run_until_parked();
2994 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2995 assert_eq!(
2996 *events.borrow(),
2997 &[
2998 language::Event::DirtyChanged,
2999 language::Event::FileHandleChanged
3000 ]
3001 );
3002
    // When a file that is already dirty is deleted, no additional `DirtyChanged`
    // event is emitted.
3004 let events = Rc::new(RefCell::new(Vec::new()));
3005 let buffer3 = project
3006 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3007 .await
3008 .unwrap();
3009 buffer3.update(cx, |_, cx| {
3010 cx.subscribe(&buffer3, {
3011 let events = events.clone();
3012 move |_, _, event, _| events.borrow_mut().push(event.clone())
3013 })
3014 .detach();
3015 });
3016
3017 buffer3.update(cx, |buffer, cx| {
3018 buffer.edit([(0..0, "x")], None, cx);
3019 });
3020 events.borrow_mut().clear();
3021 fs.remove_file("/dir/file3".as_ref(), Default::default())
3022 .await
3023 .unwrap();
3024 cx.foreground().run_until_parked();
3025 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3026 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3027}
3028
3029#[gpui::test]
3030async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3031 init_test(cx);
3032
3033 let initial_contents = "aaa\nbbbbb\nc\n";
3034 let fs = FakeFs::new(cx.background());
3035 fs.insert_tree(
3036 "/dir",
3037 json!({
3038 "the-file": initial_contents,
3039 }),
3040 )
3041 .await;
3042 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3043 let buffer = project
3044 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3045 .await
3046 .unwrap();
3047
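    // Create anchors on the first three lines so we can verify that they are
    // preserved when the buffer reloads from disk.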
3048 let anchors = (0..3)
3049 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3050 .collect::<Vec<_>>();
3051
3052 // Change the file on disk, adding two new lines of text, and removing
3053 // one line.
3054 buffer.read_with(cx, |buffer, _| {
3055 assert!(!buffer.is_dirty());
3056 assert!(!buffer.has_conflict());
3057 });
3058 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3059 fs.save(
3060 "/dir/the-file".as_ref(),
3061 &new_contents.into(),
3062 LineEnding::Unix,
3063 )
3064 .await
3065 .unwrap();
3066
3067 // Because the buffer was not modified, it is reloaded from disk. Its
3068 // contents are edited according to the diff between the old and new
3069 // file contents.
3070 cx.foreground().run_until_parked();
3071 buffer.update(cx, |buffer, _| {
3072 assert_eq!(buffer.text(), new_contents);
3073 assert!(!buffer.is_dirty());
3074 assert!(!buffer.has_conflict());
3075
3076 let anchor_positions = anchors
3077 .iter()
3078 .map(|anchor| anchor.to_point(&*buffer))
3079 .collect::<Vec<_>>();
3080 assert_eq!(
3081 anchor_positions,
3082 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3083 );
3084 });
3085
3086 // Modify the buffer
3087 buffer.update(cx, |buffer, cx| {
3088 buffer.edit([(0..0, " ")], None, cx);
3089 assert!(buffer.is_dirty());
3090 assert!(!buffer.has_conflict());
3091 });
3092
3093 // Change the file on disk again, adding blank lines to the beginning.
3094 fs.save(
3095 "/dir/the-file".as_ref(),
3096 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3097 LineEnding::Unix,
3098 )
3099 .await
3100 .unwrap();
3101
3102 // Because the buffer is modified, it doesn't reload from disk, but is
3103 // marked as having a conflict.
3104 cx.foreground().run_until_parked();
3105 buffer.read_with(cx, |buffer, _| {
3106 assert!(buffer.has_conflict());
3107 });
3108}
3109
3110#[gpui::test]
3111async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3112 init_test(cx);
3113
3114 let fs = FakeFs::new(cx.background());
3115 fs.insert_tree(
3116 "/dir",
3117 json!({
3118 "file1": "a\nb\nc\n",
3119 "file2": "one\r\ntwo\r\nthree\r\n",
3120 }),
3121 )
3122 .await;
3123
3124 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3125 let buffer1 = project
3126 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3127 .await
3128 .unwrap();
3129 let buffer2 = project
3130 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3131 .await
3132 .unwrap();
3133
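    // Line endings are detected when the file is loaded, and the buffer's text is
    // normalized to use '\n'.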
3134 buffer1.read_with(cx, |buffer, _| {
3135 assert_eq!(buffer.text(), "a\nb\nc\n");
3136 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3137 });
3138 buffer2.read_with(cx, |buffer, _| {
3139 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3140 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3141 });
3142
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // line ending should update accordingly.
3145 fs.save(
3146 "/dir/file1".as_ref(),
3147 &"aaa\nb\nc\n".into(),
3148 LineEnding::Windows,
3149 )
3150 .await
3151 .unwrap();
3152 cx.foreground().run_until_parked();
3153 buffer1.read_with(cx, |buffer, _| {
3154 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3155 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3156 });
3157
3158 // Save a file with windows line endings. The file is written correctly.
3159 buffer2.update(cx, |buffer, cx| {
3160 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3161 });
3162 project
3163 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3164 .await
3165 .unwrap();
3166 assert_eq!(
3167 fs.load("/dir/file2".as_ref()).await.unwrap(),
3168 "one\r\ntwo\r\nthree\r\nfour\r\n",
3169 );
3170}
3171
3172#[gpui::test]
3173async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3174 init_test(cx);
3175
3176 let fs = FakeFs::new(cx.background());
3177 fs.insert_tree(
3178 "/the-dir",
3179 json!({
3180 "a.rs": "
3181 fn foo(mut v: Vec<usize>) {
3182 for x in &v {
3183 v.push(1);
3184 }
3185 }
3186 "
3187 .unindent(),
3188 }),
3189 )
3190 .await;
3191
3192 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3193 let buffer = project
3194 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3195 .await
3196 .unwrap();
3197
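    // Publish two primary diagnostics whose related information refers back into the
    // same file. Each primary diagnostic and its hints should share a group id.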
3198 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3199 let message = lsp::PublishDiagnosticsParams {
3200 uri: buffer_uri.clone(),
3201 diagnostics: vec![
3202 lsp::Diagnostic {
3203 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3204 severity: Some(DiagnosticSeverity::WARNING),
3205 message: "error 1".to_string(),
3206 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3207 location: lsp::Location {
3208 uri: buffer_uri.clone(),
3209 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3210 },
3211 message: "error 1 hint 1".to_string(),
3212 }]),
3213 ..Default::default()
3214 },
3215 lsp::Diagnostic {
3216 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3217 severity: Some(DiagnosticSeverity::HINT),
3218 message: "error 1 hint 1".to_string(),
3219 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3220 location: lsp::Location {
3221 uri: buffer_uri.clone(),
3222 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3223 },
3224 message: "original diagnostic".to_string(),
3225 }]),
3226 ..Default::default()
3227 },
3228 lsp::Diagnostic {
3229 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3230 severity: Some(DiagnosticSeverity::ERROR),
3231 message: "error 2".to_string(),
3232 related_information: Some(vec![
3233 lsp::DiagnosticRelatedInformation {
3234 location: lsp::Location {
3235 uri: buffer_uri.clone(),
3236 range: lsp::Range::new(
3237 lsp::Position::new(1, 13),
3238 lsp::Position::new(1, 15),
3239 ),
3240 },
3241 message: "error 2 hint 1".to_string(),
3242 },
3243 lsp::DiagnosticRelatedInformation {
3244 location: lsp::Location {
3245 uri: buffer_uri.clone(),
3246 range: lsp::Range::new(
3247 lsp::Position::new(1, 13),
3248 lsp::Position::new(1, 15),
3249 ),
3250 },
3251 message: "error 2 hint 2".to_string(),
3252 },
3253 ]),
3254 ..Default::default()
3255 },
3256 lsp::Diagnostic {
3257 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3258 severity: Some(DiagnosticSeverity::HINT),
3259 message: "error 2 hint 1".to_string(),
3260 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3261 location: lsp::Location {
3262 uri: buffer_uri.clone(),
3263 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3264 },
3265 message: "original diagnostic".to_string(),
3266 }]),
3267 ..Default::default()
3268 },
3269 lsp::Diagnostic {
3270 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3271 severity: Some(DiagnosticSeverity::HINT),
3272 message: "error 2 hint 2".to_string(),
3273 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3274 location: lsp::Location {
3275 uri: buffer_uri,
3276 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3277 },
3278 message: "original diagnostic".to_string(),
3279 }]),
3280 ..Default::default()
3281 },
3282 ],
3283 version: None,
3284 };
3285
3286 project
3287 .update(cx, |p, cx| {
3288 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3289 })
3290 .unwrap();
3291 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3292
3293 assert_eq!(
3294 buffer
3295 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3296 .collect::<Vec<_>>(),
3297 &[
3298 DiagnosticEntry {
3299 range: Point::new(1, 8)..Point::new(1, 9),
3300 diagnostic: Diagnostic {
3301 severity: DiagnosticSeverity::WARNING,
3302 message: "error 1".to_string(),
3303 group_id: 1,
3304 is_primary: true,
3305 ..Default::default()
3306 }
3307 },
3308 DiagnosticEntry {
3309 range: Point::new(1, 8)..Point::new(1, 9),
3310 diagnostic: Diagnostic {
3311 severity: DiagnosticSeverity::HINT,
3312 message: "error 1 hint 1".to_string(),
3313 group_id: 1,
3314 is_primary: false,
3315 ..Default::default()
3316 }
3317 },
3318 DiagnosticEntry {
3319 range: Point::new(1, 13)..Point::new(1, 15),
3320 diagnostic: Diagnostic {
3321 severity: DiagnosticSeverity::HINT,
3322 message: "error 2 hint 1".to_string(),
3323 group_id: 0,
3324 is_primary: false,
3325 ..Default::default()
3326 }
3327 },
3328 DiagnosticEntry {
3329 range: Point::new(1, 13)..Point::new(1, 15),
3330 diagnostic: Diagnostic {
3331 severity: DiagnosticSeverity::HINT,
3332 message: "error 2 hint 2".to_string(),
3333 group_id: 0,
3334 is_primary: false,
3335 ..Default::default()
3336 }
3337 },
3338 DiagnosticEntry {
3339 range: Point::new(2, 8)..Point::new(2, 17),
3340 diagnostic: Diagnostic {
3341 severity: DiagnosticSeverity::ERROR,
3342 message: "error 2".to_string(),
3343 group_id: 0,
3344 is_primary: true,
3345 ..Default::default()
3346 }
3347 }
3348 ]
3349 );
3350
3351 assert_eq!(
3352 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3353 &[
3354 DiagnosticEntry {
3355 range: Point::new(1, 13)..Point::new(1, 15),
3356 diagnostic: Diagnostic {
3357 severity: DiagnosticSeverity::HINT,
3358 message: "error 2 hint 1".to_string(),
3359 group_id: 0,
3360 is_primary: false,
3361 ..Default::default()
3362 }
3363 },
3364 DiagnosticEntry {
3365 range: Point::new(1, 13)..Point::new(1, 15),
3366 diagnostic: Diagnostic {
3367 severity: DiagnosticSeverity::HINT,
3368 message: "error 2 hint 2".to_string(),
3369 group_id: 0,
3370 is_primary: false,
3371 ..Default::default()
3372 }
3373 },
3374 DiagnosticEntry {
3375 range: Point::new(2, 8)..Point::new(2, 17),
3376 diagnostic: Diagnostic {
3377 severity: DiagnosticSeverity::ERROR,
3378 message: "error 2".to_string(),
3379 group_id: 0,
3380 is_primary: true,
3381 ..Default::default()
3382 }
3383 }
3384 ]
3385 );
3386
3387 assert_eq!(
3388 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3389 &[
3390 DiagnosticEntry {
3391 range: Point::new(1, 8)..Point::new(1, 9),
3392 diagnostic: Diagnostic {
3393 severity: DiagnosticSeverity::WARNING,
3394 message: "error 1".to_string(),
3395 group_id: 1,
3396 is_primary: true,
3397 ..Default::default()
3398 }
3399 },
3400 DiagnosticEntry {
3401 range: Point::new(1, 8)..Point::new(1, 9),
3402 diagnostic: Diagnostic {
3403 severity: DiagnosticSeverity::HINT,
3404 message: "error 1 hint 1".to_string(),
3405 group_id: 1,
3406 is_primary: false,
3407 ..Default::default()
3408 }
3409 },
3410 ]
3411 );
3412}
3413
3414#[gpui::test]
3415async fn test_rename(cx: &mut gpui::TestAppContext) {
3416 init_test(cx);
3417
3418 let mut language = Language::new(
3419 LanguageConfig {
3420 name: "Rust".into(),
3421 path_suffixes: vec!["rs".to_string()],
3422 ..Default::default()
3423 },
3424 Some(tree_sitter_rust::language()),
3425 );
3426 let mut fake_servers = language
3427 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3428 capabilities: lsp::ServerCapabilities {
3429 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3430 prepare_provider: Some(true),
3431 work_done_progress_options: Default::default(),
3432 })),
3433 ..Default::default()
3434 },
3435 ..Default::default()
3436 }))
3437 .await;
3438
3439 let fs = FakeFs::new(cx.background());
3440 fs.insert_tree(
3441 "/dir",
3442 json!({
3443 "one.rs": "const ONE: usize = 1;",
3444 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3445 }),
3446 )
3447 .await;
3448
3449 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3450 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3451 let buffer = project
3452 .update(cx, |project, cx| {
3453 project.open_local_buffer("/dir/one.rs", cx)
3454 })
3455 .await
3456 .unwrap();
3457
3458 let fake_server = fake_servers.next().await.unwrap();
3459
3460 let response = project.update(cx, |project, cx| {
3461 project.prepare_rename(buffer.clone(), 7, cx)
3462 });
3463 fake_server
3464 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3465 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3466 assert_eq!(params.position, lsp::Position::new(0, 7));
3467 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3468 lsp::Position::new(0, 6),
3469 lsp::Position::new(0, 9),
3470 ))))
3471 })
3472 .next()
3473 .await
3474 .unwrap();
3475 let range = response.await.unwrap().unwrap();
3476 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3477 assert_eq!(range, 6..9);
3478
3479 let response = project.update(cx, |project, cx| {
3480 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3481 });
3482 fake_server
3483 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3484 assert_eq!(
3485 params.text_document_position.text_document.uri.as_str(),
3486 "file:///dir/one.rs"
3487 );
3488 assert_eq!(
3489 params.text_document_position.position,
3490 lsp::Position::new(0, 7)
3491 );
3492 assert_eq!(params.new_name, "THREE");
3493 Ok(Some(lsp::WorkspaceEdit {
3494 changes: Some(
3495 [
3496 (
3497 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3498 vec![lsp::TextEdit::new(
3499 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3500 "THREE".to_string(),
3501 )],
3502 ),
3503 (
3504 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3505 vec![
3506 lsp::TextEdit::new(
3507 lsp::Range::new(
3508 lsp::Position::new(0, 24),
3509 lsp::Position::new(0, 27),
3510 ),
3511 "THREE".to_string(),
3512 ),
3513 lsp::TextEdit::new(
3514 lsp::Range::new(
3515 lsp::Position::new(0, 35),
3516 lsp::Position::new(0, 38),
3517 ),
3518 "THREE".to_string(),
3519 ),
3520 ],
3521 ),
3522 ]
3523 .into_iter()
3524 .collect(),
3525 ),
3526 ..Default::default()
3527 }))
3528 })
3529 .next()
3530 .await
3531 .unwrap();
3532 let mut transaction = response.await.unwrap().0;
3533 assert_eq!(transaction.len(), 2);
3534 assert_eq!(
3535 transaction
3536 .remove_entry(&buffer)
3537 .unwrap()
3538 .0
3539 .read_with(cx, |buffer, _| buffer.text()),
3540 "const THREE: usize = 1;"
3541 );
3542 assert_eq!(
3543 transaction
3544 .into_keys()
3545 .next()
3546 .unwrap()
3547 .read_with(cx, |buffer, _| buffer.text()),
3548 "const TWO: usize = one::THREE + one::THREE;"
3549 );
3550}
3551
3552#[gpui::test]
3553async fn test_search(cx: &mut gpui::TestAppContext) {
3554 init_test(cx);
3555
3556 let fs = FakeFs::new(cx.background());
3557 fs.insert_tree(
3558 "/dir",
3559 json!({
3560 "one.rs": "const ONE: usize = 1;",
3561 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3562 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3563 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3564 }),
3565 )
3566 .await;
3567 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3568 assert_eq!(
3569 search(
3570 &project,
3571 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3572 cx
3573 )
3574 .await
3575 .unwrap(),
3576 HashMap::from_iter([
3577 ("two.rs".to_string(), vec![6..9]),
3578 ("three.rs".to_string(), vec![37..40])
3579 ])
3580 );
3581
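    // Edit an open buffer without saving it. Subsequent searches should reflect the
    // buffer's in-memory contents rather than the file on disk.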
3582 let buffer_4 = project
3583 .update(cx, |project, cx| {
3584 project.open_local_buffer("/dir/four.rs", cx)
3585 })
3586 .await
3587 .unwrap();
3588 buffer_4.update(cx, |buffer, cx| {
3589 let text = "two::TWO";
3590 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3591 });
3592
3593 assert_eq!(
3594 search(
3595 &project,
3596 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3597 cx
3598 )
3599 .await
3600 .unwrap(),
3601 HashMap::from_iter([
3602 ("two.rs".to_string(), vec![6..9]),
3603 ("three.rs".to_string(), vec![37..40]),
3604 ("four.rs".to_string(), vec![25..28, 36..39])
3605 ])
3606 );
3607}
3608
3609#[gpui::test]
3610async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3611 init_test(cx);
3612
3613 let search_query = "file";
3614
3615 let fs = FakeFs::new(cx.background());
3616 fs.insert_tree(
3617 "/dir",
3618 json!({
3619 "one.rs": r#"// Rust file one"#,
3620 "one.ts": r#"// TypeScript file one"#,
3621 "two.rs": r#"// Rust file two"#,
3622 "two.ts": r#"// TypeScript file two"#,
3623 }),
3624 )
3625 .await;
3626 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3627
3628 assert!(
3629 search(
3630 &project,
3631 SearchQuery::text(
3632 search_query,
3633 false,
3634 true,
3635 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3636 Vec::new()
3637 ),
3638 cx
3639 )
3640 .await
3641 .unwrap()
3642 .is_empty(),
3643 "If no inclusions match, no files should be returned"
3644 );
3645
3646 assert_eq!(
3647 search(
3648 &project,
3649 SearchQuery::text(
3650 search_query,
3651 false,
3652 true,
3653 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3654 Vec::new()
3655 ),
3656 cx
3657 )
3658 .await
3659 .unwrap(),
3660 HashMap::from_iter([
3661 ("one.rs".to_string(), vec![8..12]),
3662 ("two.rs".to_string(), vec![8..12]),
3663 ]),
3664 "Rust only search should give only Rust files"
3665 );
3666
3667 assert_eq!(
3668 search(
3669 &project,
3670 SearchQuery::text(
3671 search_query,
3672 false,
3673 true,
3674 vec![
3675 Glob::new("*.ts").unwrap().compile_matcher(),
3676 Glob::new("*.odd").unwrap().compile_matcher(),
3677 ],
3678 Vec::new()
3679 ),
3680 cx
3681 )
3682 .await
3683 .unwrap(),
3684 HashMap::from_iter([
3685 ("one.ts".to_string(), vec![14..18]),
3686 ("two.ts".to_string(), vec![14..18]),
3687 ]),
3688 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3689 );
3690
3691 assert_eq!(
3692 search(
3693 &project,
3694 SearchQuery::text(
3695 search_query,
3696 false,
3697 true,
3698 vec![
3699 Glob::new("*.rs").unwrap().compile_matcher(),
3700 Glob::new("*.ts").unwrap().compile_matcher(),
3701 Glob::new("*.odd").unwrap().compile_matcher(),
3702 ],
3703 Vec::new()
3704 ),
3705 cx
3706 )
3707 .await
3708 .unwrap(),
3709 HashMap::from_iter([
3710 ("one.rs".to_string(), vec![8..12]),
3711 ("one.ts".to_string(), vec![14..18]),
3712 ("two.rs".to_string(), vec![8..12]),
3713 ("two.ts".to_string(), vec![14..18]),
3714 ]),
3715 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3716 );
3717}
3718
3719#[gpui::test]
3720async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3721 init_test(cx);
3722
3723 let search_query = "file";
3724
3725 let fs = FakeFs::new(cx.background());
3726 fs.insert_tree(
3727 "/dir",
3728 json!({
3729 "one.rs": r#"// Rust file one"#,
3730 "one.ts": r#"// TypeScript file one"#,
3731 "two.rs": r#"// Rust file two"#,
3732 "two.ts": r#"// TypeScript file two"#,
3733 }),
3734 )
3735 .await;
3736 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3737
3738 assert_eq!(
3739 search(
3740 &project,
3741 SearchQuery::text(
3742 search_query,
3743 false,
3744 true,
3745 Vec::new(),
3746 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3747 ),
3748 cx
3749 )
3750 .await
3751 .unwrap(),
3752 HashMap::from_iter([
3753 ("one.rs".to_string(), vec![8..12]),
3754 ("one.ts".to_string(), vec![14..18]),
3755 ("two.rs".to_string(), vec![8..12]),
3756 ("two.ts".to_string(), vec![14..18]),
3757 ]),
3758 "If no exclusions match, all files should be returned"
3759 );
3760
3761 assert_eq!(
3762 search(
3763 &project,
3764 SearchQuery::text(
3765 search_query,
3766 false,
3767 true,
3768 Vec::new(),
3769 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3770 ),
3771 cx
3772 )
3773 .await
3774 .unwrap(),
3775 HashMap::from_iter([
3776 ("one.ts".to_string(), vec![14..18]),
3777 ("two.ts".to_string(), vec![14..18]),
3778 ]),
3779 "Rust exclusion search should give only TypeScript files"
3780 );
3781
3782 assert_eq!(
3783 search(
3784 &project,
3785 SearchQuery::text(
3786 search_query,
3787 false,
3788 true,
3789 Vec::new(),
3790 vec![
3791 Glob::new("*.ts").unwrap().compile_matcher(),
3792 Glob::new("*.odd").unwrap().compile_matcher(),
3793 ],
3794 ),
3795 cx
3796 )
3797 .await
3798 .unwrap(),
3799 HashMap::from_iter([
3800 ("one.rs".to_string(), vec![8..12]),
3801 ("two.rs".to_string(), vec![8..12]),
3802 ]),
3803 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3804 );
3805
3806 assert!(
3807 search(
3808 &project,
3809 SearchQuery::text(
3810 search_query,
3811 false,
3812 true,
3813 Vec::new(),
3814 vec![
3815 Glob::new("*.rs").unwrap().compile_matcher(),
3816 Glob::new("*.ts").unwrap().compile_matcher(),
3817 Glob::new("*.odd").unwrap().compile_matcher(),
3818 ],
3819 ),
3820 cx
3821 )
3822 .await
3823 .unwrap().is_empty(),
3824 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
3825 );
3826}
3827
3828#[gpui::test]
3829async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3830 init_test(cx);
3831
3832 let search_query = "file";
3833
3834 let fs = FakeFs::new(cx.background());
3835 fs.insert_tree(
3836 "/dir",
3837 json!({
3838 "one.rs": r#"// Rust file one"#,
3839 "one.ts": r#"// TypeScript file one"#,
3840 "two.rs": r#"// Rust file two"#,
3841 "two.ts": r#"// TypeScript file two"#,
3842 }),
3843 )
3844 .await;
3845 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3846
3847 assert!(
3848 search(
3849 &project,
3850 SearchQuery::text(
3851 search_query,
3852 false,
3853 true,
3854 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3855 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3856 ),
3857 cx
3858 )
3859 .await
3860 .unwrap()
3861 .is_empty(),
3862 "If both no exclusions and inclusions match, exclusions should win and return nothing"
3863 );
3864
3865 assert!(
3866 search(
3867 &project,
3868 SearchQuery::text(
3869 search_query,
3870 false,
3871 true,
3872 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3873 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3874 ),
3875 cx
3876 )
3877 .await
3878 .unwrap()
3879 .is_empty(),
3880 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
3881 );
3882
3883 assert!(
3884 search(
3885 &project,
3886 SearchQuery::text(
3887 search_query,
3888 false,
3889 true,
3890 vec![
3891 Glob::new("*.ts").unwrap().compile_matcher(),
3892 Glob::new("*.odd").unwrap().compile_matcher()
3893 ],
3894 vec![
3895 Glob::new("*.ts").unwrap().compile_matcher(),
3896 Glob::new("*.odd").unwrap().compile_matcher()
3897 ],
3898 ),
3899 cx
3900 )
3901 .await
3902 .unwrap()
3903 .is_empty(),
3904 "Non-matching inclusions and exclusions should not change that."
3905 );
3906
3907 assert_eq!(
3908 search(
3909 &project,
3910 SearchQuery::text(
3911 search_query,
3912 false,
3913 true,
3914 vec![
3915 Glob::new("*.ts").unwrap().compile_matcher(),
3916 Glob::new("*.odd").unwrap().compile_matcher()
3917 ],
3918 vec![
3919 Glob::new("*.rs").unwrap().compile_matcher(),
3920 Glob::new("*.odd").unwrap().compile_matcher()
3921 ],
3922 ),
3923 cx
3924 )
3925 .await
3926 .unwrap(),
3927 HashMap::from_iter([
3928 ("one.ts".to_string(), vec![14..18]),
3929 ("two.ts".to_string(), vec![14..18]),
3930 ]),
3931 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3932 );
3933}
3934
3935#[test]
3936fn test_glob_literal_prefix() {
3937 assert_eq!(glob_literal_prefix("**/*.js"), "");
3938 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
3939 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
3940 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
3941}
3942
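// Runs a project-wide search and returns the matches as a map from file path to
// matched offset ranges within that file.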
3943async fn search(
3944 project: &ModelHandle<Project>,
3945 query: SearchQuery,
3946 cx: &mut gpui::TestAppContext,
3947) -> Result<HashMap<String, Vec<Range<usize>>>> {
3948 let results = project
3949 .update(cx, |project, cx| project.search(query, cx))
3950 .await?;
3951
3952 Ok(results
3953 .into_iter()
3954 .map(|(buffer, ranges)| {
3955 buffer.read_with(cx, |buffer, _| {
3956 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3957 let ranges = ranges
3958 .into_iter()
3959 .map(|range| range.to_offset(buffer))
3960 .collect::<Vec<_>>();
3961 (path, ranges)
3962 })
3963 })
3964 .collect())
3965}
3966
3967fn init_test(cx: &mut gpui::TestAppContext) {
3968 cx.foreground().forbid_parking();
3969
3970 cx.update(|cx| {
3971 cx.set_global(SettingsStore::test(cx));
3972 language::init(cx);
3973 Project::init_settings(cx);
3974 });
3975}