1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
use std::{mem, os, path::Path, sync::Arc, task::Poll, time::Duration};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
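    // With parking allowed, a background std thread can block (here on fs metadata and a
    // sleep) and then signal the async test over a channel without deadlocking the executor.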
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
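    // Same idea as above, but the blocking work runs on smol's blocking thread pool via
    // `smol::unblock` and is awaited from a foreground task.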
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
48#[cfg(not(windows))]
49#[gpui::test]
50async fn test_symlinks(cx: &mut gpui::TestAppContext) {
51 init_test(cx);
52 cx.executor().allow_parking();
53
54 let dir = temp_tree(json!({
55 "root": {
56 "apple": "",
57 "banana": {
58 "carrot": {
59 "date": "",
60 "endive": "",
61 }
62 },
63 "fennel": {
64 "grape": "",
65 }
66 }
67 }));
68
69 let root_link_path = dir.path().join("root_link");
70 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
71 os::unix::fs::symlink(
72 &dir.path().join("root/fennel"),
73 &dir.path().join("root/finnochio"),
74 )
75 .unwrap();
76
77 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
78
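    // Both the root symlink and the nested `finnochio` -> `fennel` symlink should be
    // followed: the worktree sees five files, and the symlinked directory shares inodes
    // with its target.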
79 project.update(cx, |project, cx| {
80 let tree = project.worktrees().next().unwrap().read(cx);
81 assert_eq!(tree.file_count(), 5);
82 assert_eq!(
83 tree.inode_for_path("fennel/grape"),
84 tree.inode_for_path("finnochio/grape")
85 );
86 });
87}
88
89#[gpui::test]
90async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
91 init_test(cx);
92
93 let fs = FakeFs::new(cx.executor());
94 fs.insert_tree(
95 "/the-root",
96 json!({
97 ".zed": {
98 "settings.json": r#"{ "tab_size": 8 }"#
99 },
100 "a": {
101 "a.rs": "fn a() {\n A\n}"
102 },
103 "b": {
104 ".zed": {
105 "settings.json": r#"{ "tab_size": 2 }"#
106 },
107 "b.rs": "fn b() {\n B\n}"
108 }
109 }),
110 )
111 .await;
112
113 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
114 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
115
116 cx.executor().run_until_parked();
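    // `a/a.rs` has no nested settings file, so it should inherit the root
    // `.zed/settings.json`, while `b/b.rs` should be governed by `b/.zed/settings.json`.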
117 cx.update(|cx| {
118 let tree = worktree.read(cx);
119
120 let settings_a = language_settings(
121 None,
122 Some(
123 &(File::for_entry(
124 tree.entry_for_path("a/a.rs").unwrap().clone(),
125 worktree.clone(),
126 ) as _),
127 ),
128 cx,
129 );
130 let settings_b = language_settings(
131 None,
132 Some(
133 &(File::for_entry(
134 tree.entry_for_path("b/b.rs").unwrap().clone(),
135 worktree.clone(),
136 ) as _),
137 ),
138 cx,
139 );
140
141 assert_eq!(settings_a.tab_size.get(), 8);
142 assert_eq!(settings_b.tab_size.get(), 2);
143 });
144}
145
146#[gpui::test]
147async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
148 init_test(cx);
149
150 let mut rust_language = Language::new(
151 LanguageConfig {
152 name: "Rust".into(),
153 matcher: LanguageMatcher {
154 path_suffixes: vec!["rs".to_string()],
155 ..Default::default()
156 },
157 ..Default::default()
158 },
159 Some(tree_sitter_rust::language()),
160 );
161 let mut json_language = Language::new(
162 LanguageConfig {
163 name: "JSON".into(),
164 matcher: LanguageMatcher {
165 path_suffixes: vec!["json".to_string()],
166 ..Default::default()
167 },
168 ..Default::default()
169 },
170 None,
171 );
172 let mut fake_rust_servers = rust_language
173 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
174 name: "the-rust-language-server",
175 capabilities: lsp::ServerCapabilities {
176 completion_provider: Some(lsp::CompletionOptions {
177 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
178 ..Default::default()
179 }),
180 ..Default::default()
181 },
182 ..Default::default()
183 }))
184 .await;
185 let mut fake_json_servers = json_language
186 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
187 name: "the-json-language-server",
188 capabilities: lsp::ServerCapabilities {
189 completion_provider: Some(lsp::CompletionOptions {
190 trigger_characters: Some(vec![":".to_string()]),
191 ..Default::default()
192 }),
193 ..Default::default()
194 },
195 ..Default::default()
196 }))
197 .await;
198
199 let fs = FakeFs::new(cx.executor());
200 fs.insert_tree(
201 "/the-root",
202 json!({
203 "test.rs": "const A: i32 = 1;",
204 "test2.rs": "",
205 "Cargo.toml": "a = 1",
206 "package.json": "{\"a\": 1}",
207 }),
208 )
209 .await;
210
211 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
212
213 // Open a buffer without an associated language server.
214 let toml_buffer = project
215 .update(cx, |project, cx| {
216 project.open_local_buffer("/the-root/Cargo.toml", cx)
217 })
218 .await
219 .unwrap();
220
221 // Open a buffer with an associated language server before the language for it has been loaded.
222 let rust_buffer = project
223 .update(cx, |project, cx| {
224 project.open_local_buffer("/the-root/test.rs", cx)
225 })
226 .await
227 .unwrap();
228 rust_buffer.update(cx, |buffer, _| {
229 assert_eq!(buffer.language().map(|l| l.name()), None);
230 });
231
232 // Now we add the languages to the project, and ensure they get assigned to all
233 // the relevant open buffers.
234 project.update(cx, |project, _| {
235 project.languages.add(Arc::new(json_language));
236 project.languages.add(Arc::new(rust_language));
237 });
238 cx.executor().run_until_parked();
239 rust_buffer.update(cx, |buffer, _| {
240 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
241 });
242
243 // A server is started up, and it is notified about Rust files.
244 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
245 assert_eq!(
246 fake_rust_server
247 .receive_notification::<lsp::notification::DidOpenTextDocument>()
248 .await
249 .text_document,
250 lsp::TextDocumentItem {
251 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
252 version: 0,
253 text: "const A: i32 = 1;".to_string(),
254 language_id: Default::default()
255 }
256 );
257
258 // The buffer is configured based on the language server's capabilities.
259 rust_buffer.update(cx, |buffer, _| {
260 assert_eq!(
261 buffer.completion_triggers(),
262 &[".".to_string(), "::".to_string()]
263 );
264 });
265 toml_buffer.update(cx, |buffer, _| {
266 assert!(buffer.completion_triggers().is_empty());
267 });
268
269 // Edit a buffer. The changes are reported to the language server.
270 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
271 assert_eq!(
272 fake_rust_server
273 .receive_notification::<lsp::notification::DidChangeTextDocument>()
274 .await
275 .text_document,
276 lsp::VersionedTextDocumentIdentifier::new(
277 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
278 1
279 )
280 );
281
282 // Open a third buffer with a different associated language server.
283 let json_buffer = project
284 .update(cx, |project, cx| {
285 project.open_local_buffer("/the-root/package.json", cx)
286 })
287 .await
288 .unwrap();
289
    // A JSON language server is started up and is notified only about the JSON buffer.
291 let mut fake_json_server = fake_json_servers.next().await.unwrap();
292 assert_eq!(
293 fake_json_server
294 .receive_notification::<lsp::notification::DidOpenTextDocument>()
295 .await
296 .text_document,
297 lsp::TextDocumentItem {
298 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
299 version: 0,
300 text: "{\"a\": 1}".to_string(),
301 language_id: Default::default()
302 }
303 );
304
305 // This buffer is configured based on the second language server's
306 // capabilities.
307 json_buffer.update(cx, |buffer, _| {
308 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
309 });
310
311 // When opening another buffer whose language server is already running,
312 // it is also configured based on the existing language server's capabilities.
313 let rust_buffer2 = project
314 .update(cx, |project, cx| {
315 project.open_local_buffer("/the-root/test2.rs", cx)
316 })
317 .await
318 .unwrap();
319 rust_buffer2.update(cx, |buffer, _| {
320 assert_eq!(
321 buffer.completion_triggers(),
322 &[".".to_string(), "::".to_string()]
323 );
324 });
325
326 // Changes are reported only to servers matching the buffer's language.
327 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
328 rust_buffer2.update(cx, |buffer, cx| {
329 buffer.edit([(0..0, "let x = 1;")], None, cx)
330 });
331 assert_eq!(
332 fake_rust_server
333 .receive_notification::<lsp::notification::DidChangeTextDocument>()
334 .await
335 .text_document,
336 lsp::VersionedTextDocumentIdentifier::new(
337 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
338 1
339 )
340 );
341
342 // Save notifications are reported to all servers.
343 project
344 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
345 .await
346 .unwrap();
347 assert_eq!(
348 fake_rust_server
349 .receive_notification::<lsp::notification::DidSaveTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
353 );
354 assert_eq!(
355 fake_json_server
356 .receive_notification::<lsp::notification::DidSaveTextDocument>()
357 .await
358 .text_document,
359 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
360 );
361
362 // Renames are reported only to servers matching the buffer's language.
363 fs.rename(
364 Path::new("/the-root/test2.rs"),
365 Path::new("/the-root/test3.rs"),
366 Default::default(),
367 )
368 .await
369 .unwrap();
370 assert_eq!(
371 fake_rust_server
372 .receive_notification::<lsp::notification::DidCloseTextDocument>()
373 .await
374 .text_document,
375 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
376 );
377 assert_eq!(
378 fake_rust_server
379 .receive_notification::<lsp::notification::DidOpenTextDocument>()
380 .await
381 .text_document,
382 lsp::TextDocumentItem {
383 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
384 version: 0,
385 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
386 language_id: Default::default()
387 },
388 );
389
390 rust_buffer2.update(cx, |buffer, cx| {
391 buffer.update_diagnostics(
392 LanguageServerId(0),
393 DiagnosticSet::from_sorted_entries(
394 vec![DiagnosticEntry {
395 diagnostic: Default::default(),
396 range: Anchor::MIN..Anchor::MAX,
397 }],
398 &buffer.snapshot(),
399 ),
400 cx,
401 );
402 assert_eq!(
403 buffer
404 .snapshot()
405 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
406 .count(),
407 1
408 );
409 });
410
411 // When the rename changes the extension of the file, the buffer gets closed on the old
412 // language server and gets opened on the new one.
413 fs.rename(
414 Path::new("/the-root/test3.rs"),
415 Path::new("/the-root/test3.json"),
416 Default::default(),
417 )
418 .await
419 .unwrap();
420 assert_eq!(
421 fake_rust_server
422 .receive_notification::<lsp::notification::DidCloseTextDocument>()
423 .await
424 .text_document,
425 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
426 );
427 assert_eq!(
428 fake_json_server
429 .receive_notification::<lsp::notification::DidOpenTextDocument>()
430 .await
431 .text_document,
432 lsp::TextDocumentItem {
433 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
434 version: 0,
435 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
436 language_id: Default::default()
437 },
438 );
439
440 // We clear the diagnostics, since the language has changed.
441 rust_buffer2.update(cx, |buffer, _| {
442 assert_eq!(
443 buffer
444 .snapshot()
445 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
446 .count(),
447 0
448 );
449 });
450
    // The renamed file's version resets after changing language servers.
452 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
453 assert_eq!(
454 fake_json_server
455 .receive_notification::<lsp::notification::DidChangeTextDocument>()
456 .await
457 .text_document,
458 lsp::VersionedTextDocumentIdentifier::new(
459 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
460 1
461 )
462 );
463
464 // Restart language servers
465 project.update(cx, |project, cx| {
466 project.restart_language_servers_for_buffers(
467 vec![rust_buffer.clone(), json_buffer.clone()],
468 cx,
469 );
470 });
471
472 let mut rust_shutdown_requests = fake_rust_server
473 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
474 let mut json_shutdown_requests = fake_json_server
475 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
476 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
477
478 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
479 let mut fake_json_server = fake_json_servers.next().await.unwrap();
480
    // Ensure the Rust document is reopened in the new Rust language server.
482 assert_eq!(
483 fake_rust_server
484 .receive_notification::<lsp::notification::DidOpenTextDocument>()
485 .await
486 .text_document,
487 lsp::TextDocumentItem {
488 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
489 version: 0,
490 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
491 language_id: Default::default()
492 }
493 );
494
    // Ensure the JSON documents are reopened in the new JSON language server.
496 assert_set_eq!(
497 [
498 fake_json_server
499 .receive_notification::<lsp::notification::DidOpenTextDocument>()
500 .await
501 .text_document,
502 fake_json_server
503 .receive_notification::<lsp::notification::DidOpenTextDocument>()
504 .await
505 .text_document,
506 ],
507 [
508 lsp::TextDocumentItem {
509 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
510 version: 0,
511 text: json_buffer.update(cx, |buffer, _| buffer.text()),
512 language_id: Default::default()
513 },
514 lsp::TextDocumentItem {
515 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
516 version: 0,
517 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
518 language_id: Default::default()
519 }
520 ]
521 );
522
523 // Close notifications are reported only to servers matching the buffer's language.
524 cx.update(|_| drop(json_buffer));
525 let close_message = lsp::DidCloseTextDocumentParams {
526 text_document: lsp::TextDocumentIdentifier::new(
527 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
528 ),
529 };
530 assert_eq!(
531 fake_json_server
532 .receive_notification::<lsp::notification::DidCloseTextDocument>()
533 .await,
534 close_message,
535 );
536}
537
538#[gpui::test]
539async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
540 init_test(cx);
541
542 let mut language = Language::new(
543 LanguageConfig {
544 name: "Rust".into(),
545 matcher: LanguageMatcher {
546 path_suffixes: vec!["rs".to_string()],
547 ..Default::default()
548 },
549 ..Default::default()
550 },
551 Some(tree_sitter_rust::language()),
552 );
553 let mut fake_servers = language
554 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
555 name: "the-language-server",
556 ..Default::default()
557 }))
558 .await;
559
560 let fs = FakeFs::new(cx.executor());
561 fs.insert_tree(
562 "/the-root",
563 json!({
564 ".gitignore": "target\n",
565 "src": {
566 "a.rs": "",
567 "b.rs": "",
568 },
569 "target": {
570 "x": {
571 "out": {
572 "x.rs": ""
573 }
574 },
575 "y": {
576 "out": {
577 "y.rs": "",
578 }
579 },
580 "z": {
581 "out": {
582 "z.rs": ""
583 }
584 }
585 }
586 }),
587 )
588 .await;
589
590 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
591 project.update(cx, |project, _| {
592 project.languages.add(Arc::new(language));
593 });
594 cx.executor().run_until_parked();
595
596 // Start the language server by opening a buffer with a compatible file extension.
597 let _buffer = project
598 .update(cx, |project, cx| {
599 project.open_local_buffer("/the-root/src/a.rs", cx)
600 })
601 .await
602 .unwrap();
603
604 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
605 project.update(cx, |project, cx| {
606 let worktree = project.worktrees().next().unwrap();
607 assert_eq!(
608 worktree
609 .read(cx)
610 .snapshot()
611 .entries(true)
612 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
613 .collect::<Vec<_>>(),
614 &[
615 (Path::new(""), false),
616 (Path::new(".gitignore"), false),
617 (Path::new("src"), false),
618 (Path::new("src/a.rs"), false),
619 (Path::new("src/b.rs"), false),
620 (Path::new("target"), true),
621 ]
622 );
623 });
624
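    // Record how many directory scans have happened so far, so we can verify that
    // registering the watchers below only loads the newly watched ignored directories.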
625 let prev_read_dir_count = fs.read_dir_call_count();
626
627 // Keep track of the FS events reported to the language server.
628 let fake_server = fake_servers.next().await.unwrap();
629 let file_changes = Arc::new(Mutex::new(Vec::new()));
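    // Simulate the server registering three file watchers: `Cargo.toml`, Rust/C sources
    // under `src`, and Rust files under the ignored `target/y` directory.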
630 fake_server
631 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
632 registrations: vec![lsp::Registration {
633 id: Default::default(),
634 method: "workspace/didChangeWatchedFiles".to_string(),
635 register_options: serde_json::to_value(
636 lsp::DidChangeWatchedFilesRegistrationOptions {
637 watchers: vec![
638 lsp::FileSystemWatcher {
639 glob_pattern: lsp::GlobPattern::String(
640 "/the-root/Cargo.toml".to_string(),
641 ),
642 kind: None,
643 },
644 lsp::FileSystemWatcher {
645 glob_pattern: lsp::GlobPattern::String(
646 "/the-root/src/*.{rs,c}".to_string(),
647 ),
648 kind: None,
649 },
650 lsp::FileSystemWatcher {
651 glob_pattern: lsp::GlobPattern::String(
652 "/the-root/target/y/**/*.rs".to_string(),
653 ),
654 kind: None,
655 },
656 ],
657 },
658 )
659 .ok(),
660 }],
661 })
662 .await
663 .unwrap();
664 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
665 let file_changes = file_changes.clone();
666 move |params, _| {
667 let mut file_changes = file_changes.lock();
668 file_changes.extend(params.changes);
669 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
670 }
671 });
672
673 cx.executor().run_until_parked();
674 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
675 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
676
677 // Now the language server has asked us to watch an ignored directory path,
678 // so we recursively load it.
679 project.update(cx, |project, cx| {
680 let worktree = project.worktrees().next().unwrap();
681 assert_eq!(
682 worktree
683 .read(cx)
684 .snapshot()
685 .entries(true)
686 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
687 .collect::<Vec<_>>(),
688 &[
689 (Path::new(""), false),
690 (Path::new(".gitignore"), false),
691 (Path::new("src"), false),
692 (Path::new("src/a.rs"), false),
693 (Path::new("src/b.rs"), false),
694 (Path::new("target"), true),
695 (Path::new("target/x"), true),
696 (Path::new("target/y"), true),
697 (Path::new("target/y/out"), true),
698 (Path::new("target/y/out/y.rs"), true),
699 (Path::new("target/z"), true),
700 ]
701 );
702 });
703
    // Perform some file system mutations, three of which match the watched patterns,
    // and two of which do not.
706 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
707 .await
708 .unwrap();
709 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
710 .await
711 .unwrap();
712 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
713 .await
714 .unwrap();
715 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
716 .await
717 .unwrap();
718 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
719 .await
720 .unwrap();
721
722 // The language server receives events for the FS mutations that match its watch patterns.
723 cx.executor().run_until_parked();
724 assert_eq!(
725 &*file_changes.lock(),
726 &[
727 lsp::FileEvent {
728 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
729 typ: lsp::FileChangeType::DELETED,
730 },
731 lsp::FileEvent {
732 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
733 typ: lsp::FileChangeType::CREATED,
734 },
735 lsp::FileEvent {
736 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
737 typ: lsp::FileChangeType::CREATED,
738 },
739 ]
740 );
741}
742
743#[gpui::test]
744async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
745 init_test(cx);
746
747 let fs = FakeFs::new(cx.executor());
748 fs.insert_tree(
749 "/dir",
750 json!({
751 "a.rs": "let a = 1;",
752 "b.rs": "let b = 2;"
753 }),
754 )
755 .await;
756
757 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
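    // `a.rs` and `b.rs` are opened as two separate single-file worktrees; diagnostics
    // published for one file must not bleed into the other buffer.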
758
759 let buffer_a = project
760 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
761 .await
762 .unwrap();
763 let buffer_b = project
764 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
765 .await
766 .unwrap();
767
768 project.update(cx, |project, cx| {
769 project
770 .update_diagnostics(
771 LanguageServerId(0),
772 lsp::PublishDiagnosticsParams {
773 uri: Url::from_file_path("/dir/a.rs").unwrap(),
774 version: None,
775 diagnostics: vec![lsp::Diagnostic {
776 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
777 severity: Some(lsp::DiagnosticSeverity::ERROR),
778 message: "error 1".to_string(),
779 ..Default::default()
780 }],
781 },
782 &[],
783 cx,
784 )
785 .unwrap();
786 project
787 .update_diagnostics(
788 LanguageServerId(0),
789 lsp::PublishDiagnosticsParams {
790 uri: Url::from_file_path("/dir/b.rs").unwrap(),
791 version: None,
792 diagnostics: vec![lsp::Diagnostic {
793 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
794 severity: Some(lsp::DiagnosticSeverity::WARNING),
795 message: "error 2".to_string(),
796 ..Default::default()
797 }],
798 },
799 &[],
800 cx,
801 )
802 .unwrap();
803 });
804
805 buffer_a.update(cx, |buffer, _| {
806 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
807 assert_eq!(
808 chunks
809 .iter()
810 .map(|(s, d)| (s.as_str(), *d))
811 .collect::<Vec<_>>(),
812 &[
813 ("let ", None),
814 ("a", Some(DiagnosticSeverity::ERROR)),
815 (" = 1;", None),
816 ]
817 );
818 });
819 buffer_b.update(cx, |buffer, _| {
820 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
821 assert_eq!(
822 chunks
823 .iter()
824 .map(|(s, d)| (s.as_str(), *d))
825 .collect::<Vec<_>>(),
826 &[
827 ("let ", None),
828 ("b", Some(DiagnosticSeverity::WARNING)),
829 (" = 2;", None),
830 ]
831 );
832 });
833}
834
835#[gpui::test]
836async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
837 init_test(cx);
838
839 let fs = FakeFs::new(cx.executor());
840 fs.insert_tree(
841 "/root",
842 json!({
843 "dir": {
844 ".git": {
845 "HEAD": "ref: refs/heads/main",
846 },
847 ".gitignore": "b.rs",
848 "a.rs": "let a = 1;",
849 "b.rs": "let b = 2;",
850 },
851 "other.rs": "let b = c;"
852 }),
853 )
854 .await;
855
856 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
857 let (worktree, _) = project
858 .update(cx, |project, cx| {
859 project.find_or_create_local_worktree("/root/dir", true, cx)
860 })
861 .await
862 .unwrap();
863 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
864
865 let (worktree, _) = project
866 .update(cx, |project, cx| {
867 project.find_or_create_local_worktree("/root/other.rs", false, cx)
868 })
869 .await
870 .unwrap();
871 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
872
873 let server_id = LanguageServerId(0);
874 project.update(cx, |project, cx| {
875 project
876 .update_diagnostics(
877 server_id,
878 lsp::PublishDiagnosticsParams {
879 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
880 version: None,
881 diagnostics: vec![lsp::Diagnostic {
882 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
883 severity: Some(lsp::DiagnosticSeverity::ERROR),
884 message: "unused variable 'b'".to_string(),
885 ..Default::default()
886 }],
887 },
888 &[],
889 cx,
890 )
891 .unwrap();
892 project
893 .update_diagnostics(
894 server_id,
895 lsp::PublishDiagnosticsParams {
896 uri: Url::from_file_path("/root/other.rs").unwrap(),
897 version: None,
898 diagnostics: vec![lsp::Diagnostic {
899 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
900 severity: Some(lsp::DiagnosticSeverity::ERROR),
901 message: "unknown variable 'c'".to_string(),
902 ..Default::default()
903 }],
904 },
905 &[],
906 cx,
907 )
908 .unwrap();
909 });
910
911 let main_ignored_buffer = project
912 .update(cx, |project, cx| {
913 project.open_buffer((main_worktree_id, "b.rs"), cx)
914 })
915 .await
916 .unwrap();
917 main_ignored_buffer.update(cx, |buffer, _| {
918 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
919 assert_eq!(
920 chunks
921 .iter()
922 .map(|(s, d)| (s.as_str(), *d))
923 .collect::<Vec<_>>(),
924 &[
925 ("let ", None),
926 ("b", Some(DiagnosticSeverity::ERROR)),
927 (" = 2;", None),
928 ],
929 "Gigitnored buffers should still get in-buffer diagnostics",
930 );
931 });
932 let other_buffer = project
933 .update(cx, |project, cx| {
934 project.open_buffer((other_worktree_id, ""), cx)
935 })
936 .await
937 .unwrap();
938 other_buffer.update(cx, |buffer, _| {
939 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
940 assert_eq!(
941 chunks
942 .iter()
943 .map(|(s, d)| (s.as_str(), *d))
944 .collect::<Vec<_>>(),
945 &[
946 ("let b = ", None),
947 ("c", Some(DiagnosticSeverity::ERROR)),
948 (";", None),
949 ],
950 "Buffers from hidden projects should still get in-buffer diagnostics"
951 );
952 });
953
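    // Project-wide summaries skip gitignored files unless ignored entries are requested,
    // and never include non-visible worktrees.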
954 project.update(cx, |project, cx| {
955 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
956 assert_eq!(
957 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
958 vec![(
959 ProjectPath {
960 worktree_id: main_worktree_id,
961 path: Arc::from(Path::new("b.rs")),
962 },
963 server_id,
964 DiagnosticSummary {
965 error_count: 1,
966 warning_count: 0,
967 }
968 )]
969 );
970 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
971 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
972 });
973}
974
975#[gpui::test]
976async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
977 init_test(cx);
978
979 let progress_token = "the-progress-token";
980 let mut language = Language::new(
981 LanguageConfig {
982 name: "Rust".into(),
983 matcher: LanguageMatcher {
984 path_suffixes: vec!["rs".to_string()],
985 ..Default::default()
986 },
987 ..Default::default()
988 },
989 Some(tree_sitter_rust::language()),
990 );
991 let mut fake_servers = language
992 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
993 disk_based_diagnostics_progress_token: Some(progress_token.into()),
994 disk_based_diagnostics_sources: vec!["disk".into()],
995 ..Default::default()
996 }))
997 .await;
998
999 let fs = FakeFs::new(cx.executor());
1000 fs.insert_tree(
1001 "/dir",
1002 json!({
1003 "a.rs": "fn a() { A }",
1004 "b.rs": "const y: i32 = 1",
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1010 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1011 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1012
1013 // Cause worktree to start the fake language server
1014 let _buffer = project
1015 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1016 .await
1017 .unwrap();
1018
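    // Subscribe to project events so the exact sequence of language server and
    // diagnostics events can be asserted below.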
1019 let mut events = cx.events(&project);
1020
1021 let fake_server = fake_servers.next().await.unwrap();
1022 assert_eq!(
1023 events.next().await.unwrap(),
1024 Event::LanguageServerAdded(LanguageServerId(0)),
1025 );
1026
1027 fake_server
1028 .start_progress(format!("{}/0", progress_token))
1029 .await;
1030 assert_eq!(
1031 events.next().await.unwrap(),
1032 Event::DiskBasedDiagnosticsStarted {
1033 language_server_id: LanguageServerId(0),
1034 }
1035 );
1036
1037 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1038 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1039 version: None,
1040 diagnostics: vec![lsp::Diagnostic {
1041 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1042 severity: Some(lsp::DiagnosticSeverity::ERROR),
1043 message: "undefined variable 'A'".to_string(),
1044 ..Default::default()
1045 }],
1046 });
1047 assert_eq!(
1048 events.next().await.unwrap(),
1049 Event::DiagnosticsUpdated {
1050 language_server_id: LanguageServerId(0),
1051 path: (worktree_id, Path::new("a.rs")).into()
1052 }
1053 );
1054
1055 fake_server.end_progress(format!("{}/0", progress_token));
1056 assert_eq!(
1057 events.next().await.unwrap(),
1058 Event::DiskBasedDiagnosticsFinished {
1059 language_server_id: LanguageServerId(0)
1060 }
1061 );
1062
1063 let buffer = project
1064 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1065 .await
1066 .unwrap();
1067
1068 buffer.update(cx, |buffer, _| {
1069 let snapshot = buffer.snapshot();
1070 let diagnostics = snapshot
1071 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1072 .collect::<Vec<_>>();
1073 assert_eq!(
1074 diagnostics,
1075 &[DiagnosticEntry {
1076 range: Point::new(0, 9)..Point::new(0, 10),
1077 diagnostic: Diagnostic {
1078 severity: lsp::DiagnosticSeverity::ERROR,
1079 message: "undefined variable 'A'".to_string(),
1080 group_id: 0,
1081 is_primary: true,
1082 ..Default::default()
1083 }
1084 }]
1085 )
1086 });
1087
1088 // Ensure publishing empty diagnostics twice only results in one update event.
1089 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1090 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1091 version: None,
1092 diagnostics: Default::default(),
1093 });
1094 assert_eq!(
1095 events.next().await.unwrap(),
1096 Event::DiagnosticsUpdated {
1097 language_server_id: LanguageServerId(0),
1098 path: (worktree_id, Path::new("a.rs")).into()
1099 }
1100 );
1101
1102 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1103 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1104 version: None,
1105 diagnostics: Default::default(),
1106 });
1107 cx.executor().run_until_parked();
1108 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1109}
1110
1111#[gpui::test]
1112async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1113 init_test(cx);
1114
1115 let progress_token = "the-progress-token";
1116 let mut language = Language::new(
1117 LanguageConfig {
1118 matcher: LanguageMatcher {
1119 path_suffixes: vec!["rs".to_string()],
1120 ..Default::default()
1121 },
1122 ..Default::default()
1123 },
1124 None,
1125 );
1126 let mut fake_servers = language
1127 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1128 disk_based_diagnostics_sources: vec!["disk".into()],
1129 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1130 ..Default::default()
1131 }))
1132 .await;
1133
1134 let fs = FakeFs::new(cx.executor());
1135 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1136
1137 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1138 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1139
1140 let buffer = project
1141 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1142 .await
1143 .unwrap();
1144
1145 // Simulate diagnostics starting to update.
1146 let fake_server = fake_servers.next().await.unwrap();
1147 fake_server.start_progress(progress_token).await;
1148
1149 // Restart the server before the diagnostics finish updating.
1150 project.update(cx, |project, cx| {
1151 project.restart_language_servers_for_buffers([buffer], cx);
1152 });
1153 let mut events = cx.events(&project);
1154
1155 // Simulate the newly started server sending more diagnostics.
1156 let fake_server = fake_servers.next().await.unwrap();
1157 assert_eq!(
1158 events.next().await.unwrap(),
1159 Event::LanguageServerAdded(LanguageServerId(1))
1160 );
1161 fake_server.start_progress(progress_token).await;
1162 assert_eq!(
1163 events.next().await.unwrap(),
1164 Event::DiskBasedDiagnosticsStarted {
1165 language_server_id: LanguageServerId(1)
1166 }
1167 );
1168 project.update(cx, |project, _| {
1169 assert_eq!(
1170 project
1171 .language_servers_running_disk_based_diagnostics()
1172 .collect::<Vec<_>>(),
1173 [LanguageServerId(1)]
1174 );
1175 });
1176
1177 // All diagnostics are considered done, despite the old server's diagnostic
1178 // task never completing.
1179 fake_server.end_progress(progress_token);
1180 assert_eq!(
1181 events.next().await.unwrap(),
1182 Event::DiskBasedDiagnosticsFinished {
1183 language_server_id: LanguageServerId(1)
1184 }
1185 );
1186 project.update(cx, |project, _| {
1187 assert_eq!(
1188 project
1189 .language_servers_running_disk_based_diagnostics()
1190 .collect::<Vec<_>>(),
1191 [LanguageServerId(0); 0]
1192 );
1193 });
1194}
1195
1196#[gpui::test]
1197async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1198 init_test(cx);
1199
1200 let mut language = Language::new(
1201 LanguageConfig {
1202 matcher: LanguageMatcher {
1203 path_suffixes: vec!["rs".to_string()],
1204 ..Default::default()
1205 },
1206 ..Default::default()
1207 },
1208 None,
1209 );
1210 let mut fake_servers = language
1211 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1212 ..Default::default()
1213 }))
1214 .await;
1215
1216 let fs = FakeFs::new(cx.executor());
1217 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1218
1219 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1220 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1221
1222 let buffer = project
1223 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1224 .await
1225 .unwrap();
1226
1227 // Publish diagnostics
1228 let fake_server = fake_servers.next().await.unwrap();
1229 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1230 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1231 version: None,
1232 diagnostics: vec![lsp::Diagnostic {
1233 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1234 severity: Some(lsp::DiagnosticSeverity::ERROR),
1235 message: "the message".to_string(),
1236 ..Default::default()
1237 }],
1238 });
1239
1240 cx.executor().run_until_parked();
1241 buffer.update(cx, |buffer, _| {
1242 assert_eq!(
1243 buffer
1244 .snapshot()
1245 .diagnostics_in_range::<_, usize>(0..1, false)
1246 .map(|entry| entry.diagnostic.message.clone())
1247 .collect::<Vec<_>>(),
1248 ["the message".to_string()]
1249 );
1250 });
1251 project.update(cx, |project, cx| {
1252 assert_eq!(
1253 project.diagnostic_summary(false, cx),
1254 DiagnosticSummary {
1255 error_count: 1,
1256 warning_count: 0,
1257 }
1258 );
1259 });
1260
1261 project.update(cx, |project, cx| {
1262 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1263 });
1264
1265 // The diagnostics are cleared.
1266 cx.executor().run_until_parked();
1267 buffer.update(cx, |buffer, _| {
1268 assert_eq!(
1269 buffer
1270 .snapshot()
1271 .diagnostics_in_range::<_, usize>(0..1, false)
1272 .map(|entry| entry.diagnostic.message.clone())
1273 .collect::<Vec<_>>(),
1274 Vec::<String>::new(),
1275 );
1276 });
1277 project.update(cx, |project, cx| {
1278 assert_eq!(
1279 project.diagnostic_summary(false, cx),
1280 DiagnosticSummary {
1281 error_count: 0,
1282 warning_count: 0,
1283 }
1284 );
1285 });
1286}
1287
1288#[gpui::test]
1289async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1290 init_test(cx);
1291
1292 let mut language = Language::new(
1293 LanguageConfig {
1294 matcher: LanguageMatcher {
1295 path_suffixes: vec!["rs".to_string()],
1296 ..Default::default()
1297 },
1298 ..Default::default()
1299 },
1300 None,
1301 );
1302 let mut fake_servers = language
1303 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1304 name: "the-lsp",
1305 ..Default::default()
1306 }))
1307 .await;
1308
1309 let fs = FakeFs::new(cx.executor());
1310 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1311
1312 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1313 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1314
1315 let buffer = project
1316 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1317 .await
1318 .unwrap();
1319
1320 // Before restarting the server, report diagnostics with an unknown buffer version.
1321 let fake_server = fake_servers.next().await.unwrap();
1322 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1323 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1324 version: Some(10000),
1325 diagnostics: Vec::new(),
1326 });
1327 cx.executor().run_until_parked();
1328
1329 project.update(cx, |project, cx| {
1330 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1331 });
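    // The restarted server should re-open the buffer at version 0; the bogus version
    // reported earlier must not carry over.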
1332 let mut fake_server = fake_servers.next().await.unwrap();
1333 let notification = fake_server
1334 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1335 .await
1336 .text_document;
1337 assert_eq!(notification.version, 0);
1338}
1339
1340#[gpui::test]
1341async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1342 init_test(cx);
1343
1344 let mut rust = Language::new(
1345 LanguageConfig {
1346 name: Arc::from("Rust"),
1347 matcher: LanguageMatcher {
1348 path_suffixes: vec!["rs".to_string()],
1349 ..Default::default()
1350 },
1351 ..Default::default()
1352 },
1353 None,
1354 );
1355 let mut fake_rust_servers = rust
1356 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1357 name: "rust-lsp",
1358 ..Default::default()
1359 }))
1360 .await;
1361 let mut js = Language::new(
1362 LanguageConfig {
1363 name: Arc::from("JavaScript"),
1364 matcher: LanguageMatcher {
1365 path_suffixes: vec!["js".to_string()],
1366 ..Default::default()
1367 },
1368 ..Default::default()
1369 },
1370 None,
1371 );
1372 let mut fake_js_servers = js
1373 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1374 name: "js-lsp",
1375 ..Default::default()
1376 }))
1377 .await;
1378
1379 let fs = FakeFs::new(cx.executor());
1380 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1381 .await;
1382
1383 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1384 project.update(cx, |project, _| {
1385 project.languages.add(Arc::new(rust));
1386 project.languages.add(Arc::new(js));
1387 });
1388
1389 let _rs_buffer = project
1390 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1391 .await
1392 .unwrap();
1393 let _js_buffer = project
1394 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1395 .await
1396 .unwrap();
1397
1398 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1399 assert_eq!(
1400 fake_rust_server_1
1401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1402 .await
1403 .text_document
1404 .uri
1405 .as_str(),
1406 "file:///dir/a.rs"
1407 );
1408
1409 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1410 assert_eq!(
1411 fake_js_server
1412 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1413 .await
1414 .text_document
1415 .uri
1416 .as_str(),
1417 "file:///dir/b.js"
1418 );
1419
1420 // Disable Rust language server, ensuring only that server gets stopped.
1421 cx.update(|cx| {
1422 cx.update_global(|settings: &mut SettingsStore, cx| {
1423 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1424 settings.languages.insert(
1425 Arc::from("Rust"),
1426 LanguageSettingsContent {
1427 enable_language_server: Some(false),
1428 ..Default::default()
1429 },
1430 );
1431 });
1432 })
1433 });
1434 fake_rust_server_1
1435 .receive_notification::<lsp::notification::Exit>()
1436 .await;
1437
1438 // Enable Rust and disable JavaScript language servers, ensuring that the
1439 // former gets started again and that the latter stops.
1440 cx.update(|cx| {
1441 cx.update_global(|settings: &mut SettingsStore, cx| {
1442 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1443 settings.languages.insert(
1444 Arc::from("Rust"),
1445 LanguageSettingsContent {
1446 enable_language_server: Some(true),
1447 ..Default::default()
1448 },
1449 );
1450 settings.languages.insert(
1451 Arc::from("JavaScript"),
1452 LanguageSettingsContent {
1453 enable_language_server: Some(false),
1454 ..Default::default()
1455 },
1456 );
1457 });
1458 })
1459 });
1460 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1461 assert_eq!(
1462 fake_rust_server_2
1463 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1464 .await
1465 .text_document
1466 .uri
1467 .as_str(),
1468 "file:///dir/a.rs"
1469 );
1470 fake_js_server
1471 .receive_notification::<lsp::notification::Exit>()
1472 .await;
1473}
1474
1475#[gpui::test(iterations = 3)]
1476async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1477 init_test(cx);
1478
1479 let mut language = Language::new(
1480 LanguageConfig {
1481 name: "Rust".into(),
1482 matcher: LanguageMatcher {
1483 path_suffixes: vec!["rs".to_string()],
1484 ..Default::default()
1485 },
1486 ..Default::default()
1487 },
1488 Some(tree_sitter_rust::language()),
1489 );
1490 let mut fake_servers = language
1491 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1492 disk_based_diagnostics_sources: vec!["disk".into()],
1493 ..Default::default()
1494 }))
1495 .await;
1496
1497 let text = "
1498 fn a() { A }
1499 fn b() { BB }
1500 fn c() { CCC }
1501 "
1502 .unindent();
1503
1504 let fs = FakeFs::new(cx.executor());
1505 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1506
1507 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1508 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1509
1510 let buffer = project
1511 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1512 .await
1513 .unwrap();
1514
1515 let mut fake_server = fake_servers.next().await.unwrap();
1516 let open_notification = fake_server
1517 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1518 .await;
1519
1520 // Edit the buffer, moving the content down
1521 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1522 let change_notification_1 = fake_server
1523 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1524 .await;
1525 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1526
1527 // Report some diagnostics for the initial version of the buffer
1528 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1529 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1530 version: Some(open_notification.text_document.version),
1531 diagnostics: vec![
1532 lsp::Diagnostic {
1533 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1534 severity: Some(DiagnosticSeverity::ERROR),
1535 message: "undefined variable 'A'".to_string(),
1536 source: Some("disk".to_string()),
1537 ..Default::default()
1538 },
1539 lsp::Diagnostic {
1540 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1541 severity: Some(DiagnosticSeverity::ERROR),
1542 message: "undefined variable 'BB'".to_string(),
1543 source: Some("disk".to_string()),
1544 ..Default::default()
1545 },
1546 lsp::Diagnostic {
1547 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1548 severity: Some(DiagnosticSeverity::ERROR),
1549 source: Some("disk".to_string()),
1550 message: "undefined variable 'CCC'".to_string(),
1551 ..Default::default()
1552 },
1553 ],
1554 });
1555
1556 // The diagnostics have moved down since they were created.
1557 cx.executor().run_until_parked();
1558 buffer.update(cx, |buffer, _| {
1559 assert_eq!(
1560 buffer
1561 .snapshot()
1562 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1563 .collect::<Vec<_>>(),
1564 &[
1565 DiagnosticEntry {
1566 range: Point::new(3, 9)..Point::new(3, 11),
1567 diagnostic: Diagnostic {
1568 source: Some("disk".into()),
1569 severity: DiagnosticSeverity::ERROR,
1570 message: "undefined variable 'BB'".to_string(),
1571 is_disk_based: true,
1572 group_id: 1,
1573 is_primary: true,
1574 ..Default::default()
1575 },
1576 },
1577 DiagnosticEntry {
1578 range: Point::new(4, 9)..Point::new(4, 12),
1579 diagnostic: Diagnostic {
1580 source: Some("disk".into()),
1581 severity: DiagnosticSeverity::ERROR,
1582 message: "undefined variable 'CCC'".to_string(),
1583 is_disk_based: true,
1584 group_id: 2,
1585 is_primary: true,
1586 ..Default::default()
1587 }
1588 }
1589 ]
1590 );
1591 assert_eq!(
1592 chunks_with_diagnostics(buffer, 0..buffer.len()),
1593 [
1594 ("\n\nfn a() { ".to_string(), None),
1595 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1596 (" }\nfn b() { ".to_string(), None),
1597 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1598 (" }\nfn c() { ".to_string(), None),
1599 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1600 (" }\n".to_string(), None),
1601 ]
1602 );
1603 assert_eq!(
1604 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1605 [
1606 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1607 (" }\nfn c() { ".to_string(), None),
1608 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1609 ]
1610 );
1611 });
1612
1613 // Ensure overlapping diagnostics are highlighted correctly.
1614 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1615 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1616 version: Some(open_notification.text_document.version),
1617 diagnostics: vec![
1618 lsp::Diagnostic {
1619 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1620 severity: Some(DiagnosticSeverity::ERROR),
1621 message: "undefined variable 'A'".to_string(),
1622 source: Some("disk".to_string()),
1623 ..Default::default()
1624 },
1625 lsp::Diagnostic {
1626 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1627 severity: Some(DiagnosticSeverity::WARNING),
1628 message: "unreachable statement".to_string(),
1629 source: Some("disk".to_string()),
1630 ..Default::default()
1631 },
1632 ],
1633 });
1634
1635 cx.executor().run_until_parked();
1636 buffer.update(cx, |buffer, _| {
1637 assert_eq!(
1638 buffer
1639 .snapshot()
1640 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1641 .collect::<Vec<_>>(),
1642 &[
1643 DiagnosticEntry {
1644 range: Point::new(2, 9)..Point::new(2, 12),
1645 diagnostic: Diagnostic {
1646 source: Some("disk".into()),
1647 severity: DiagnosticSeverity::WARNING,
1648 message: "unreachable statement".to_string(),
1649 is_disk_based: true,
1650 group_id: 4,
1651 is_primary: true,
1652 ..Default::default()
1653 }
1654 },
1655 DiagnosticEntry {
1656 range: Point::new(2, 9)..Point::new(2, 10),
1657 diagnostic: Diagnostic {
1658 source: Some("disk".into()),
1659 severity: DiagnosticSeverity::ERROR,
1660 message: "undefined variable 'A'".to_string(),
1661 is_disk_based: true,
1662 group_id: 3,
1663 is_primary: true,
1664 ..Default::default()
1665 },
1666 }
1667 ]
1668 );
1669 assert_eq!(
1670 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1671 [
1672 ("fn a() { ".to_string(), None),
1673 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1674 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1675 ("\n".to_string(), None),
1676 ]
1677 );
1678 assert_eq!(
1679 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1680 [
1681 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1682 ("\n".to_string(), None),
1683 ]
1684 );
1685 });
1686
1687 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1688 // changes since the last save.
1689 buffer.update(cx, |buffer, cx| {
1690 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1691 buffer.edit(
1692 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1693 None,
1694 cx,
1695 );
1696 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1697 });
1698 let change_notification_2 = fake_server
1699 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1700 .await;
1701 assert!(
1702 change_notification_2.text_document.version > change_notification_1.text_document.version
1703 );
1704
    // Handle diagnostics that are reported with their ranges out of order.
1706 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1707 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1708 version: Some(change_notification_2.text_document.version),
1709 diagnostics: vec![
1710 lsp::Diagnostic {
1711 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1712 severity: Some(DiagnosticSeverity::ERROR),
1713 message: "undefined variable 'BB'".to_string(),
1714 source: Some("disk".to_string()),
1715 ..Default::default()
1716 },
1717 lsp::Diagnostic {
1718 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1719 severity: Some(DiagnosticSeverity::WARNING),
1720 message: "undefined variable 'A'".to_string(),
1721 source: Some("disk".to_string()),
1722 ..Default::default()
1723 },
1724 ],
1725 });
1726
1727 cx.executor().run_until_parked();
1728 buffer.update(cx, |buffer, _| {
1729 assert_eq!(
1730 buffer
1731 .snapshot()
1732 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1733 .collect::<Vec<_>>(),
1734 &[
1735 DiagnosticEntry {
1736 range: Point::new(2, 21)..Point::new(2, 22),
1737 diagnostic: Diagnostic {
1738 source: Some("disk".into()),
1739 severity: DiagnosticSeverity::WARNING,
1740 message: "undefined variable 'A'".to_string(),
1741 is_disk_based: true,
1742 group_id: 6,
1743 is_primary: true,
1744 ..Default::default()
1745 }
1746 },
1747 DiagnosticEntry {
1748 range: Point::new(3, 9)..Point::new(3, 14),
1749 diagnostic: Diagnostic {
1750 source: Some("disk".into()),
1751 severity: DiagnosticSeverity::ERROR,
1752 message: "undefined variable 'BB'".to_string(),
1753 is_disk_based: true,
1754 group_id: 5,
1755 is_primary: true,
1756 ..Default::default()
1757 },
1758 }
1759 ]
1760 );
1761 });
1762}
1763
1764#[gpui::test]
1765async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1766 init_test(cx);
1767
1768 let text = concat!(
1769 "let one = ;\n", //
1770 "let two = \n",
1771 "let three = 3;\n",
1772 );
1773
1774 let fs = FakeFs::new(cx.executor());
1775 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1776
1777 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1778 let buffer = project
1779 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1780 .await
1781 .unwrap();
1782
1783 project.update(cx, |project, cx| {
1784 project
1785 .update_buffer_diagnostics(
1786 &buffer,
1787 LanguageServerId(0),
1788 None,
1789 vec![
1790 DiagnosticEntry {
1791 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1792 diagnostic: Diagnostic {
1793 severity: DiagnosticSeverity::ERROR,
1794 message: "syntax error 1".to_string(),
1795 ..Default::default()
1796 },
1797 },
1798 DiagnosticEntry {
1799 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1800 diagnostic: Diagnostic {
1801 severity: DiagnosticSeverity::ERROR,
1802 message: "syntax error 2".to_string(),
1803 ..Default::default()
1804 },
1805 },
1806 ],
1807 cx,
1808 )
1809 .unwrap();
1810 });
1811
1812 // An empty range is extended forward to include the following character.
1813 // At the end of a line, an empty range is extended backward to include
1814 // the preceding character.
1815 buffer.update(cx, |buffer, _| {
1816 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1817 assert_eq!(
1818 chunks
1819 .iter()
1820 .map(|(s, d)| (s.as_str(), *d))
1821 .collect::<Vec<_>>(),
1822 &[
1823 ("let one = ", None),
1824 (";", Some(DiagnosticSeverity::ERROR)),
1825 ("\nlet two =", None),
1826 (" ", Some(DiagnosticSeverity::ERROR)),
1827 ("\nlet three = 3;\n", None)
1828 ]
1829 );
1830 });
1831}
1832
1833#[gpui::test]
1834async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1835 init_test(cx);
1836
1837 let fs = FakeFs::new(cx.executor());
1838 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1839 .await;
1840
1841 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1842
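    // Two language servers report an error for the same range in the same file; the
    // project-wide summary should count both.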
1843 project.update(cx, |project, cx| {
1844 project
1845 .update_diagnostic_entries(
1846 LanguageServerId(0),
1847 Path::new("/dir/a.rs").to_owned(),
1848 None,
1849 vec![DiagnosticEntry {
1850 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1851 diagnostic: Diagnostic {
1852 severity: DiagnosticSeverity::ERROR,
1853 is_primary: true,
1854 message: "syntax error a1".to_string(),
1855 ..Default::default()
1856 },
1857 }],
1858 cx,
1859 )
1860 .unwrap();
1861 project
1862 .update_diagnostic_entries(
1863 LanguageServerId(1),
1864 Path::new("/dir/a.rs").to_owned(),
1865 None,
1866 vec![DiagnosticEntry {
1867 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1868 diagnostic: Diagnostic {
1869 severity: DiagnosticSeverity::ERROR,
1870 is_primary: true,
1871 message: "syntax error b1".to_string(),
1872 ..Default::default()
1873 },
1874 }],
1875 cx,
1876 )
1877 .unwrap();
1878
1879 assert_eq!(
1880 project.diagnostic_summary(false, cx),
1881 DiagnosticSummary {
1882 error_count: 2,
1883 warning_count: 0,
1884 }
1885 );
1886 });
1887}
1888
1889#[gpui::test]
1890async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1891 init_test(cx);
1892
1893 let mut language = Language::new(
1894 LanguageConfig {
1895 name: "Rust".into(),
1896 matcher: LanguageMatcher {
1897 path_suffixes: vec!["rs".to_string()],
1898 ..Default::default()
1899 },
1900 ..Default::default()
1901 },
1902 Some(tree_sitter_rust::language()),
1903 );
1904 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1905
1906 let text = "
1907 fn a() {
1908 f1();
1909 }
1910 fn b() {
1911 f2();
1912 }
1913 fn c() {
1914 f3();
1915 }
1916 "
1917 .unindent();
1918
1919 let fs = FakeFs::new(cx.executor());
1920 fs.insert_tree(
1921 "/dir",
1922 json!({
1923 "a.rs": text.clone(),
1924 }),
1925 )
1926 .await;
1927
1928 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1929 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1930 let buffer = project
1931 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1932 .await
1933 .unwrap();
1934
1935 let mut fake_server = fake_servers.next().await.unwrap();
1936 let lsp_document_version = fake_server
1937 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1938 .await
1939 .text_document
1940 .version;
1941
1942 // Simulate editing the buffer after the language server computes some edits.
1943 buffer.update(cx, |buffer, cx| {
1944 buffer.edit(
1945 [(
1946 Point::new(0, 0)..Point::new(0, 0),
1947 "// above first function\n",
1948 )],
1949 None,
1950 cx,
1951 );
1952 buffer.edit(
1953 [(
1954 Point::new(2, 0)..Point::new(2, 0),
1955 " // inside first function\n",
1956 )],
1957 None,
1958 cx,
1959 );
1960 buffer.edit(
1961 [(
1962 Point::new(6, 4)..Point::new(6, 4),
1963 "// inside second function ",
1964 )],
1965 None,
1966 cx,
1967 );
1968
1969 assert_eq!(
1970 buffer.text(),
1971 "
1972 // above first function
1973 fn a() {
1974 // inside first function
1975 f1();
1976 }
1977 fn b() {
1978 // inside second function f2();
1979 }
1980 fn c() {
1981 f3();
1982 }
1983 "
1984 .unindent()
1985 );
1986 });
1987
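    // These edits are expressed against the old document version, so they must be
    // transformed through the buffer edits made above before being applied.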
1988 let edits = project
1989 .update(cx, |project, cx| {
1990 project.edits_from_lsp(
1991 &buffer,
1992 vec![
1993 // replace body of first function
1994 lsp::TextEdit {
1995 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1996 new_text: "
1997 fn a() {
1998 f10();
1999 }
2000 "
2001 .unindent(),
2002 },
2003 // edit inside second function
2004 lsp::TextEdit {
2005 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2006 new_text: "00".into(),
2007 },
2008 // edit inside third function via two distinct edits
2009 lsp::TextEdit {
2010 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2011 new_text: "4000".into(),
2012 },
2013 lsp::TextEdit {
2014 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2015 new_text: "".into(),
2016 },
2017 ],
2018 LanguageServerId(0),
2019 Some(lsp_document_version),
2020 cx,
2021 )
2022 })
2023 .await
2024 .unwrap();
2025
2026 buffer.update(cx, |buffer, cx| {
2027 for (range, new_text) in edits {
2028 buffer.edit([(range, new_text)], None, cx);
2029 }
2030 assert_eq!(
2031 buffer.text(),
2032 "
2033 // above first function
2034 fn a() {
2035 // inside first function
2036 f10();
2037 }
2038 fn b() {
2039 // inside second function f200();
2040 }
2041 fn c() {
2042 f4000();
2043 }
2044 "
2045 .unindent()
2046 );
2047 });
2048}
2049
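// Verifies that a large diff-style set of LSP edits on adjacent lines is
// minimized into the smallest equivalent buffer edits.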
2050#[gpui::test]
2051async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2052 init_test(cx);
2053
2054 let text = "
2055 use a::b;
2056 use a::c;
2057
2058 fn f() {
2059 b();
2060 c();
2061 }
2062 "
2063 .unindent();
2064
2065 let fs = FakeFs::new(cx.executor());
2066 fs.insert_tree(
2067 "/dir",
2068 json!({
2069 "a.rs": text.clone(),
2070 }),
2071 )
2072 .await;
2073
2074 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2075 let buffer = project
2076 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2077 .await
2078 .unwrap();
2079
2080 // Simulate the language server sending us a small edit in the form of a very large diff.
2081 // Rust-analyzer does this when performing a merge-imports code action.
2082 let edits = project
2083 .update(cx, |project, cx| {
2084 project.edits_from_lsp(
2085 &buffer,
2086 [
2087 // Replace the first use statement without editing the semicolon.
2088 lsp::TextEdit {
2089 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2090 new_text: "a::{b, c}".into(),
2091 },
2092 // Reinsert the remainder of the file between the semicolon and the final
2093 // newline of the file.
2094 lsp::TextEdit {
2095 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2096 new_text: "\n\n".into(),
2097 },
2098 lsp::TextEdit {
2099 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2100 new_text: "
2101 fn f() {
2102 b();
2103 c();
2104 }"
2105 .unindent(),
2106 },
2107 // Delete everything after the first newline of the file.
2108 lsp::TextEdit {
2109 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2110 new_text: "".into(),
2111 },
2112 ],
2113 LanguageServerId(0),
2114 None,
2115 cx,
2116 )
2117 })
2118 .await
2119 .unwrap();
2120
2121 buffer.update(cx, |buffer, cx| {
2122 let edits = edits
2123 .into_iter()
2124 .map(|(range, text)| {
2125 (
2126 range.start.to_point(buffer)..range.end.to_point(buffer),
2127 text,
2128 )
2129 })
2130 .collect::<Vec<_>>();
2131
2132 assert_eq!(
2133 edits,
2134 [
2135 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2136 (Point::new(1, 0)..Point::new(2, 0), "".into())
2137 ]
2138 );
2139
2140 for (range, new_text) in edits {
2141 buffer.edit([(range, new_text)], None, cx);
2142 }
2143 assert_eq!(
2144 buffer.text(),
2145 "
2146 use a::{b, c};
2147
2148 fn f() {
2149 b();
2150 c();
2151 }
2152 "
2153 .unindent()
2154 );
2155 });
2156}
2157
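// Verifies that unordered, inverted, or out-of-bounds LSP edit ranges are
// normalized rather than causing a panic or an error.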
2158#[gpui::test]
2159async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2160 init_test(cx);
2161
2162 let text = "
2163 use a::b;
2164 use a::c;
2165
2166 fn f() {
2167 b();
2168 c();
2169 }
2170 "
2171 .unindent();
2172
2173 let fs = FakeFs::new(cx.executor());
2174 fs.insert_tree(
2175 "/dir",
2176 json!({
2177 "a.rs": text.clone(),
2178 }),
2179 )
2180 .await;
2181
2182 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2183 let buffer = project
2184 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2185 .await
2186 .unwrap();
2187
2188 // Simulate the language server sending us edits in a non-ordered fashion,
2189 // with ranges sometimes being inverted or pointing to invalid locations.
2190 let edits = project
2191 .update(cx, |project, cx| {
2192 project.edits_from_lsp(
2193 &buffer,
2194 [
2195 lsp::TextEdit {
2196 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2197 new_text: "\n\n".into(),
2198 },
2199 lsp::TextEdit {
2200 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2201 new_text: "a::{b, c}".into(),
2202 },
2203 lsp::TextEdit {
2204 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2205 new_text: "".into(),
2206 },
2207 lsp::TextEdit {
2208 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2209 new_text: "
2210 fn f() {
2211 b();
2212 c();
2213 }"
2214 .unindent(),
2215 },
2216 ],
2217 LanguageServerId(0),
2218 None,
2219 cx,
2220 )
2221 })
2222 .await
2223 .unwrap();
2224
2225 buffer.update(cx, |buffer, cx| {
2226 let edits = edits
2227 .into_iter()
2228 .map(|(range, text)| {
2229 (
2230 range.start.to_point(buffer)..range.end.to_point(buffer),
2231 text,
2232 )
2233 })
2234 .collect::<Vec<_>>();
2235
2236 assert_eq!(
2237 edits,
2238 [
2239 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2240 (Point::new(1, 0)..Point::new(2, 0), "".into())
2241 ]
2242 );
2243
2244 for (range, new_text) in edits {
2245 buffer.edit([(range, new_text)], None, cx);
2246 }
2247 assert_eq!(
2248 buffer.text(),
2249 "
2250 use a::{b, c};
2251
2252 fn f() {
2253 b();
2254 c();
2255 }
2256 "
2257 .unindent()
2258 );
2259 });
2260}
2261
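// Collects a buffer range into chunks, coalescing adjacent chunks that share
// the same diagnostic severity.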
2262fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2263 buffer: &Buffer,
2264 range: Range<T>,
2265) -> Vec<(String, Option<DiagnosticSeverity>)> {
2266 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2267 for chunk in buffer.snapshot().chunks(range, true) {
2268 if chunks.last().map_or(false, |prev_chunk| {
2269 prev_chunk.1 == chunk.diagnostic_severity
2270 }) {
2271 chunks.last_mut().unwrap().0.push_str(chunk.text);
2272 } else {
2273 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2274 }
2275 }
2276 chunks
2277}
2278
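// Verifies go-to-definition: the target file is opened in a new, invisible
// worktree that is released once the definition is dropped.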
2279#[gpui::test(iterations = 10)]
2280async fn test_definition(cx: &mut gpui::TestAppContext) {
2281 init_test(cx);
2282
2283 let mut language = Language::new(
2284 LanguageConfig {
2285 name: "Rust".into(),
2286 matcher: LanguageMatcher {
2287 path_suffixes: vec!["rs".to_string()],
2288 ..Default::default()
2289 },
2290 ..Default::default()
2291 },
2292 Some(tree_sitter_rust::language()),
2293 );
2294 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2295
2296 let fs = FakeFs::new(cx.executor());
2297 fs.insert_tree(
2298 "/dir",
2299 json!({
2300 "a.rs": "const fn a() { A }",
2301 "b.rs": "const y: i32 = crate::a()",
2302 }),
2303 )
2304 .await;
2305
2306 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2307 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2308
2309 let buffer = project
2310 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2311 .await
2312 .unwrap();
2313
2314 let fake_server = fake_servers.next().await.unwrap();
2315 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2316 let params = params.text_document_position_params;
2317 assert_eq!(
2318 params.text_document.uri.to_file_path().unwrap(),
2319 Path::new("/dir/b.rs"),
2320 );
2321 assert_eq!(params.position, lsp::Position::new(0, 22));
2322
2323 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2324 lsp::Location::new(
2325 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2326 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2327 ),
2328 )))
2329 });
2330
2331 let mut definitions = project
2332 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2333 .await
2334 .unwrap();
2335
    // Assert that no new language server was started.
2337 cx.executor().run_until_parked();
2338 assert!(fake_servers.try_next().is_err());
2339
2340 assert_eq!(definitions.len(), 1);
2341 let definition = definitions.pop().unwrap();
2342 cx.update(|cx| {
2343 let target_buffer = definition.target.buffer.read(cx);
2344 assert_eq!(
2345 target_buffer
2346 .file()
2347 .unwrap()
2348 .as_local()
2349 .unwrap()
2350 .abs_path(cx),
2351 Path::new("/dir/a.rs"),
2352 );
2353 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2354 assert_eq!(
2355 list_worktrees(&project, cx),
2356 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2357 );
2358
2359 drop(definition);
2360 });
2361 cx.update(|cx| {
2362 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2363 });
2364
2365 fn list_worktrees<'a>(
2366 project: &'a Model<Project>,
2367 cx: &'a AppContext,
2368 ) -> Vec<(&'a Path, bool)> {
2369 project
2370 .read(cx)
2371 .worktrees()
2372 .map(|worktree| {
2373 let worktree = worktree.read(cx);
2374 (
2375 worktree.as_local().unwrap().abs_path().as_ref(),
2376 worktree.is_visible(),
2377 )
2378 })
2379 .collect::<Vec<_>>()
2380 }
2381}
2382
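// Verifies that completions lacking an explicit edit range fall back to
// replacing the word preceding the cursor.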
2383#[gpui::test]
2384async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2385 init_test(cx);
2386
2387 let mut language = Language::new(
2388 LanguageConfig {
2389 name: "TypeScript".into(),
2390 matcher: LanguageMatcher {
2391 path_suffixes: vec!["ts".to_string()],
2392 ..Default::default()
2393 },
2394 ..Default::default()
2395 },
2396 Some(tree_sitter_typescript::language_typescript()),
2397 );
2398 let mut fake_language_servers = language
2399 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2400 capabilities: lsp::ServerCapabilities {
2401 completion_provider: Some(lsp::CompletionOptions {
2402 trigger_characters: Some(vec![":".to_string()]),
2403 ..Default::default()
2404 }),
2405 ..Default::default()
2406 },
2407 ..Default::default()
2408 }))
2409 .await;
2410
2411 let fs = FakeFs::new(cx.executor());
2412 fs.insert_tree(
2413 "/dir",
2414 json!({
2415 "a.ts": "",
2416 }),
2417 )
2418 .await;
2419
2420 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2421 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2422 let buffer = project
2423 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2424 .await
2425 .unwrap();
2426
2427 let fake_server = fake_language_servers.next().await.unwrap();
2428
2429 let text = "let a = b.fqn";
2430 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2431 let completions = project.update(cx, |project, cx| {
2432 project.completions(&buffer, text.len(), cx)
2433 });
2434
2435 fake_server
2436 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2437 Ok(Some(lsp::CompletionResponse::Array(vec![
2438 lsp::CompletionItem {
2439 label: "fullyQualifiedName?".into(),
2440 insert_text: Some("fullyQualifiedName".into()),
2441 ..Default::default()
2442 },
2443 ])))
2444 })
2445 .next()
2446 .await;
2447 let completions = completions.await.unwrap();
2448 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2449 assert_eq!(completions.len(), 1);
2450 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2451 assert_eq!(
2452 completions[0].old_range.to_offset(&snapshot),
2453 text.len() - 3..text.len()
2454 );
2455
2456 let text = "let a = \"atoms/cmp\"";
2457 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2458 let completions = project.update(cx, |project, cx| {
2459 project.completions(&buffer, text.len() - 1, cx)
2460 });
2461
2462 fake_server
2463 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2464 Ok(Some(lsp::CompletionResponse::Array(vec![
2465 lsp::CompletionItem {
2466 label: "component".into(),
2467 ..Default::default()
2468 },
2469 ])))
2470 })
2471 .next()
2472 .await;
2473 let completions = completions.await.unwrap();
2474 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2475 assert_eq!(completions.len(), 1);
2476 assert_eq!(completions[0].new_text, "component");
2477 assert_eq!(
2478 completions[0].old_range.to_offset(&snapshot),
2479 text.len() - 4..text.len() - 1
2480 );
2481}
2482
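// Verifies that carriage returns in completion text are normalized to newlines.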
2483#[gpui::test]
2484async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2485 init_test(cx);
2486
2487 let mut language = Language::new(
2488 LanguageConfig {
2489 name: "TypeScript".into(),
2490 matcher: LanguageMatcher {
2491 path_suffixes: vec!["ts".to_string()],
2492 ..Default::default()
2493 },
2494 ..Default::default()
2495 },
2496 Some(tree_sitter_typescript::language_typescript()),
2497 );
2498 let mut fake_language_servers = language
2499 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2500 capabilities: lsp::ServerCapabilities {
2501 completion_provider: Some(lsp::CompletionOptions {
2502 trigger_characters: Some(vec![":".to_string()]),
2503 ..Default::default()
2504 }),
2505 ..Default::default()
2506 },
2507 ..Default::default()
2508 }))
2509 .await;
2510
2511 let fs = FakeFs::new(cx.executor());
2512 fs.insert_tree(
2513 "/dir",
2514 json!({
2515 "a.ts": "",
2516 }),
2517 )
2518 .await;
2519
2520 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2521 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2522 let buffer = project
2523 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2524 .await
2525 .unwrap();
2526
2527 let fake_server = fake_language_servers.next().await.unwrap();
2528
2529 let text = "let a = b.fqn";
2530 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2531 let completions = project.update(cx, |project, cx| {
2532 project.completions(&buffer, text.len(), cx)
2533 });
2534
2535 fake_server
2536 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2537 Ok(Some(lsp::CompletionResponse::Array(vec![
2538 lsp::CompletionItem {
2539 label: "fullyQualifiedName?".into(),
2540 insert_text: Some("fully\rQualified\r\nName".into()),
2541 ..Default::default()
2542 },
2543 ])))
2544 })
2545 .next()
2546 .await;
2547 let completions = completions.await.unwrap();
2548 assert_eq!(completions.len(), 1);
2549 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2550}
2551
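// Verifies applying a code action whose resolution yields no edits: the
// associated command is executed, and the edits arrive via `workspace/applyEdit`.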
2552#[gpui::test(iterations = 10)]
2553async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2554 init_test(cx);
2555
2556 let mut language = Language::new(
2557 LanguageConfig {
2558 name: "TypeScript".into(),
2559 matcher: LanguageMatcher {
2560 path_suffixes: vec!["ts".to_string()],
2561 ..Default::default()
2562 },
2563 ..Default::default()
2564 },
2565 None,
2566 );
2567 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2568
2569 let fs = FakeFs::new(cx.executor());
2570 fs.insert_tree(
2571 "/dir",
2572 json!({
2573 "a.ts": "a",
2574 }),
2575 )
2576 .await;
2577
2578 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2579 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2580 let buffer = project
2581 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2582 .await
2583 .unwrap();
2584
2585 let fake_server = fake_language_servers.next().await.unwrap();
2586
    // The language server returns code actions that contain commands, not edits.
2588 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2589 fake_server
2590 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2591 Ok(Some(vec![
2592 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2593 title: "The code action".into(),
2594 command: Some(lsp::Command {
2595 title: "The command".into(),
2596 command: "_the/command".into(),
2597 arguments: Some(vec![json!("the-argument")]),
2598 }),
2599 ..Default::default()
2600 }),
2601 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2602 title: "two".into(),
2603 ..Default::default()
2604 }),
2605 ]))
2606 })
2607 .next()
2608 .await;
2609
2610 let action = actions.await.unwrap()[0].clone();
2611 let apply = project.update(cx, |project, cx| {
2612 project.apply_code_action(buffer.clone(), action, true, cx)
2613 });
2614
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2617 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2618 |action, _| async move { Ok(action) },
2619 );
2620
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2623 fake_server
2624 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2625 let fake = fake_server.clone();
2626 move |params, _| {
2627 assert_eq!(params.command, "_the/command");
2628 let fake = fake.clone();
2629 async move {
2630 fake.server
2631 .request::<lsp::request::ApplyWorkspaceEdit>(
2632 lsp::ApplyWorkspaceEditParams {
2633 label: None,
2634 edit: lsp::WorkspaceEdit {
2635 changes: Some(
2636 [(
2637 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2638 vec![lsp::TextEdit {
2639 range: lsp::Range::new(
2640 lsp::Position::new(0, 0),
2641 lsp::Position::new(0, 0),
2642 ),
2643 new_text: "X".into(),
2644 }],
2645 )]
2646 .into_iter()
2647 .collect(),
2648 ),
2649 ..Default::default()
2650 },
2651 },
2652 )
2653 .await
2654 .unwrap();
2655 Ok(Some(json!(null)))
2656 }
2657 }
2658 })
2659 .next()
2660 .await;
2661
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2664 let transaction = apply.await.unwrap();
2665 assert!(transaction.0.contains_key(&buffer));
2666 buffer.update(cx, |buffer, cx| {
2667 assert_eq!(buffer.text(), "Xa");
2668 buffer.undo(cx);
2669 assert_eq!(buffer.text(), "a");
2670 });
2671}
2672
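// Verifies that saving a modified buffer writes its contents to disk.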
2673#[gpui::test(iterations = 10)]
2674async fn test_save_file(cx: &mut gpui::TestAppContext) {
2675 init_test(cx);
2676
2677 let fs = FakeFs::new(cx.executor());
2678 fs.insert_tree(
2679 "/dir",
2680 json!({
2681 "file1": "the old contents",
2682 }),
2683 )
2684 .await;
2685
2686 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2687 let buffer = project
2688 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2689 .await
2690 .unwrap();
2691 buffer.update(cx, |buffer, cx| {
2692 assert_eq!(buffer.text(), "the old contents");
2693 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2694 });
2695
2696 project
2697 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2698 .await
2699 .unwrap();
2700
2701 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2702 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2703}
2704
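// Verifies that rapid successive changes to a file on disk leave the buffer
// clean and matching the final on-disk contents, even when reloads are slow.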
2705#[gpui::test(iterations = 30)]
2706async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2707 init_test(cx);
2708
2709 let fs = FakeFs::new(cx.executor().clone());
2710 fs.insert_tree(
2711 "/dir",
2712 json!({
2713 "file1": "the original contents",
2714 }),
2715 )
2716 .await;
2717
2718 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2719 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2720 let buffer = project
2721 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2722 .await
2723 .unwrap();
2724
2725 // Simulate buffer diffs being slow, so that they don't complete before
2726 // the next file change occurs.
2727 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2728
2729 // Change the buffer's file on disk, and then wait for the file change
2730 // to be detected by the worktree, so that the buffer starts reloading.
2731 fs.save(
2732 "/dir/file1".as_ref(),
2733 &"the first contents".into(),
2734 Default::default(),
2735 )
2736 .await
2737 .unwrap();
2738 worktree.next_event(cx);
2739
2740 // Change the buffer's file again. Depending on the random seed, the
2741 // previous file change may still be in progress.
2742 fs.save(
2743 "/dir/file1".as_ref(),
2744 &"the second contents".into(),
2745 Default::default(),
2746 )
2747 .await
2748 .unwrap();
2749 worktree.next_event(cx);
2750
2751 cx.executor().run_until_parked();
2752 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2753 buffer.read_with(cx, |buffer, _| {
2754 assert_eq!(buffer.text(), on_disk_text);
2755 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2757 });
2758}
2759
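// Verifies buffer state when an edit races with a reload from disk: the buffer
// either matches the disk contents and stays clean, or is marked dirty and
// conflicted.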
2760#[gpui::test(iterations = 30)]
2761async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2762 init_test(cx);
2763
2764 let fs = FakeFs::new(cx.executor().clone());
2765 fs.insert_tree(
2766 "/dir",
2767 json!({
2768 "file1": "the original contents",
2769 }),
2770 )
2771 .await;
2772
2773 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2774 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2775 let buffer = project
2776 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2777 .await
2778 .unwrap();
2779
2780 // Simulate buffer diffs being slow, so that they don't complete before
2781 // the next file change occurs.
2782 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2783
2784 // Change the buffer's file on disk, and then wait for the file change
2785 // to be detected by the worktree, so that the buffer starts reloading.
2786 fs.save(
2787 "/dir/file1".as_ref(),
2788 &"the first contents".into(),
2789 Default::default(),
2790 )
2791 .await
2792 .unwrap();
2793 worktree.next_event(cx);
2794
2795 cx.executor()
2796 .spawn(cx.executor().simulate_random_delay())
2797 .await;
2798
    // Perform a no-op edit, causing the buffer's version to increase.
2800 buffer.update(cx, |buffer, cx| {
2801 buffer.edit([(0..0, " ")], None, cx);
2802 buffer.undo(cx);
2803 });
2804
2805 cx.executor().run_until_parked();
2806 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2807 buffer.read_with(cx, |buffer, _| {
2808 let buffer_text = buffer.text();
2809 if buffer_text == on_disk_text {
2810 assert!(
2811 !buffer.is_dirty() && !buffer.has_conflict(),
2812 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2813 );
2814 }
2815 // If the file change occurred while the buffer was processing the first
2816 // change, the buffer will be in a conflicting state.
2817 else {
            assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2820 }
2821 });
2822}
2823
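// Verifies saving a buffer that belongs to a single-file worktree.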
2824#[gpui::test]
2825async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2826 init_test(cx);
2827
2828 let fs = FakeFs::new(cx.executor());
2829 fs.insert_tree(
2830 "/dir",
2831 json!({
2832 "file1": "the old contents",
2833 }),
2834 )
2835 .await;
2836
2837 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2838 let buffer = project
2839 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2840 .await
2841 .unwrap();
2842 buffer.update(cx, |buffer, cx| {
2843 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2844 });
2845
2846 project
2847 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2848 .await
2849 .unwrap();
2850
2851 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2852 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2853}
2854
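// Verifies "save as": the buffer is written to the new path, becomes clean,
// picks up the language for its new extension, and is reused when reopened.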
2855#[gpui::test]
2856async fn test_save_as(cx: &mut gpui::TestAppContext) {
2857 init_test(cx);
2858
2859 let fs = FakeFs::new(cx.executor());
2860 fs.insert_tree("/dir", json!({})).await;
2861
2862 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2863
2864 let languages = project.update(cx, |project, _| project.languages().clone());
2865 languages.register_native_grammars([("rust", tree_sitter_rust::language())]);
2866 languages.register_test_language(LanguageConfig {
2867 name: "Rust".into(),
2868 grammar: Some("rust".into()),
2869 matcher: LanguageMatcher {
2870 path_suffixes: vec!["rs".into()],
2871 ..Default::default()
2872 },
2873 ..Default::default()
2874 });
2875
2876 let buffer = project.update(cx, |project, cx| {
2877 project.create_buffer("", None, cx).unwrap()
2878 });
2879 buffer.update(cx, |buffer, cx| {
2880 buffer.edit([(0..0, "abc")], None, cx);
2881 assert!(buffer.is_dirty());
2882 assert!(!buffer.has_conflict());
2883 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2884 });
2885 project
2886 .update(cx, |project, cx| {
2887 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2888 })
2889 .await
2890 .unwrap();
2891 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2892
2893 cx.executor().run_until_parked();
2894 buffer.update(cx, |buffer, cx| {
2895 assert_eq!(
2896 buffer.file().unwrap().full_path(cx),
2897 Path::new("dir/file1.rs")
2898 );
2899 assert!(!buffer.is_dirty());
2900 assert!(!buffer.has_conflict());
2901 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2902 });
2903
2904 let opened_buffer = project
2905 .update(cx, |project, cx| {
2906 project.open_local_buffer("/dir/file1.rs", cx)
2907 })
2908 .await
2909 .unwrap();
2910 assert_eq!(opened_buffer, buffer);
2911}
2912
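// Verifies that renames and deletions on disk preserve entry ids and update
// buffer paths, and that a remote worktree replica converges to the same state.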
2913#[gpui::test(retries = 5)]
2914async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2915 init_test(cx);
2916 cx.executor().allow_parking();
2917
2918 let dir = temp_tree(json!({
2919 "a": {
2920 "file1": "",
2921 "file2": "",
2922 "file3": "",
2923 },
2924 "b": {
2925 "c": {
2926 "file4": "",
2927 "file5": "",
2928 }
2929 }
2930 }));
2931
2932 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2933 let rpc = project.update(cx, |p, _| p.client.clone());
2934
2935 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2936 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2937 async move { buffer.await.unwrap() }
2938 };
2939 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2940 project.update(cx, |project, cx| {
2941 let tree = project.worktrees().next().unwrap();
2942 tree.read(cx)
2943 .entry_for_path(path)
2944 .unwrap_or_else(|| panic!("no entry for path {}", path))
2945 .id
2946 })
2947 };
2948
2949 let buffer2 = buffer_for_path("a/file2", cx).await;
2950 let buffer3 = buffer_for_path("a/file3", cx).await;
2951 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2952 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2953
2954 let file2_id = id_for_path("a/file2", cx);
2955 let file3_id = id_for_path("a/file3", cx);
2956 let file4_id = id_for_path("b/c/file4", cx);
2957
2958 // Create a remote copy of this worktree.
2959 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2960
2961 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2962
2963 let updates = Arc::new(Mutex::new(Vec::new()));
2964 tree.update(cx, |tree, cx| {
2965 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2966 let updates = updates.clone();
2967 move |update| {
2968 updates.lock().push(update);
2969 async { true }
2970 }
2971 });
2972 });
2973
2974 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2975
2976 cx.executor().run_until_parked();
2977
2978 cx.update(|cx| {
2979 assert!(!buffer2.read(cx).is_dirty());
2980 assert!(!buffer3.read(cx).is_dirty());
2981 assert!(!buffer4.read(cx).is_dirty());
2982 assert!(!buffer5.read(cx).is_dirty());
2983 });
2984
2985 // Rename and delete files and directories.
2986 tree.flush_fs_events(cx).await;
2987 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2988 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2989 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2990 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2991 tree.flush_fs_events(cx).await;
2992
2993 let expected_paths = vec![
2994 "a",
2995 "a/file1",
2996 "a/file2.new",
2997 "b",
2998 "d",
2999 "d/file3",
3000 "d/file4",
3001 ];
3002
3003 cx.update(|app| {
3004 assert_eq!(
3005 tree.read(app)
3006 .paths()
3007 .map(|p| p.to_str().unwrap())
3008 .collect::<Vec<_>>(),
3009 expected_paths
3010 );
3011 });
3012
3013 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
3014 assert_eq!(id_for_path("d/file3", cx), file3_id);
3015 assert_eq!(id_for_path("d/file4", cx), file4_id);
3016
3017 cx.update(|cx| {
3018 assert_eq!(
3019 buffer2.read(cx).file().unwrap().path().as_ref(),
3020 Path::new("a/file2.new")
3021 );
3022 assert_eq!(
3023 buffer3.read(cx).file().unwrap().path().as_ref(),
3024 Path::new("d/file3")
3025 );
3026 assert_eq!(
3027 buffer4.read(cx).file().unwrap().path().as_ref(),
3028 Path::new("d/file4")
3029 );
3030 assert_eq!(
3031 buffer5.read(cx).file().unwrap().path().as_ref(),
3032 Path::new("b/c/file5")
3033 );
3034
3035 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
3036 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
3037 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
3038 assert!(buffer5.read(cx).file().unwrap().is_deleted());
3039 });
3040
3041 // Update the remote worktree. Check that it becomes consistent with the
3042 // local worktree.
3043 cx.executor().run_until_parked();
3044
3045 remote.update(cx, |remote, _| {
3046 for update in updates.lock().drain(..) {
3047 remote.as_remote_mut().unwrap().update_from_remote(update);
3048 }
3049 });
3050 cx.executor().run_until_parked();
3051 remote.update(cx, |remote, _| {
3052 assert_eq!(
3053 remote
3054 .paths()
3055 .map(|p| p.to_str().unwrap())
3056 .collect::<Vec<_>>(),
3057 expected_paths
3058 );
3059 });
3060}
3061
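// Verifies that entry ids and open buffers are preserved when a parent
// directory is renamed.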
3062#[gpui::test(iterations = 10)]
3063async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3064 init_test(cx);
3065
3066 let fs = FakeFs::new(cx.executor());
3067 fs.insert_tree(
3068 "/dir",
3069 json!({
3070 "a": {
3071 "file1": "",
3072 }
3073 }),
3074 )
3075 .await;
3076
3077 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3078 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3079 let tree_id = tree.update(cx, |tree, _| tree.id());
3080
3081 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3082 project.update(cx, |project, cx| {
3083 let tree = project.worktrees().next().unwrap();
3084 tree.read(cx)
3085 .entry_for_path(path)
3086 .unwrap_or_else(|| panic!("no entry for path {}", path))
3087 .id
3088 })
3089 };
3090
3091 let dir_id = id_for_path("a", cx);
3092 let file_id = id_for_path("a/file1", cx);
3093 let buffer = project
3094 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3095 .await
3096 .unwrap();
3097 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3098
3099 project
3100 .update(cx, |project, cx| {
3101 project.rename_entry(dir_id, Path::new("b"), cx)
3102 })
3103 .unwrap()
3104 .await
3105 .unwrap();
3106 cx.executor().run_until_parked();
3107
3108 assert_eq!(id_for_path("b", cx), dir_id);
3109 assert_eq!(id_for_path("b/file1", cx), file_id);
3110 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3111}
3112
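// Verifies that opening the same path multiple times, even concurrently,
// yields a single shared buffer.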
3113#[gpui::test]
3114async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3115 init_test(cx);
3116
3117 let fs = FakeFs::new(cx.executor());
3118 fs.insert_tree(
3119 "/dir",
3120 json!({
3121 "a.txt": "a-contents",
3122 "b.txt": "b-contents",
3123 }),
3124 )
3125 .await;
3126
3127 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3128
3129 // Spawn multiple tasks to open paths, repeating some paths.
3130 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3131 (
3132 p.open_local_buffer("/dir/a.txt", cx),
3133 p.open_local_buffer("/dir/b.txt", cx),
3134 p.open_local_buffer("/dir/a.txt", cx),
3135 )
3136 });
3137
3138 let buffer_a_1 = buffer_a_1.await.unwrap();
3139 let buffer_a_2 = buffer_a_2.await.unwrap();
3140 let buffer_b = buffer_b.await.unwrap();
3141 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3142 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3143
3144 // There is only one buffer per path.
3145 let buffer_a_id = buffer_a_1.entity_id();
3146 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3147
3148 // Open the same path again while it is still open.
3149 drop(buffer_a_1);
3150 let buffer_a_3 = project
3151 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3152 .await
3153 .unwrap();
3154
3155 // There's still only one buffer per path.
3156 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3157}
3158
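// Verifies dirty-state tracking: editing, saving, restoring the saved contents,
// and deleting the file on disk each update `is_dirty` and emit the expected
// events.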
3159#[gpui::test]
3160async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3161 init_test(cx);
3162
3163 let fs = FakeFs::new(cx.executor());
3164 fs.insert_tree(
3165 "/dir",
3166 json!({
3167 "file1": "abc",
3168 "file2": "def",
3169 "file3": "ghi",
3170 }),
3171 )
3172 .await;
3173
3174 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3175
3176 let buffer1 = project
3177 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3178 .await
3179 .unwrap();
3180 let events = Arc::new(Mutex::new(Vec::new()));
3181
3182 // initially, the buffer isn't dirty.
3183 buffer1.update(cx, |buffer, cx| {
3184 cx.subscribe(&buffer1, {
3185 let events = events.clone();
3186 move |_, _, event, _| match event {
3187 BufferEvent::Operation(_) => {}
3188 _ => events.lock().push(event.clone()),
3189 }
3190 })
3191 .detach();
3192
3193 assert!(!buffer.is_dirty());
3194 assert!(events.lock().is_empty());
3195
3196 buffer.edit([(1..2, "")], None, cx);
3197 });
3198
3199 // after the first edit, the buffer is dirty, and emits a dirtied event.
3200 buffer1.update(cx, |buffer, cx| {
3201 assert!(buffer.text() == "ac");
3202 assert!(buffer.is_dirty());
3203 assert_eq!(
3204 *events.lock(),
3205 &[language::Event::Edited, language::Event::DirtyChanged]
3206 );
3207 events.lock().clear();
3208 buffer.did_save(
3209 buffer.version(),
3210 buffer.as_rope().fingerprint(),
3211 buffer.file().unwrap().mtime(),
3212 cx,
3213 );
3214 });
3215
3216 // after saving, the buffer is not dirty, and emits a saved event.
3217 buffer1.update(cx, |buffer, cx| {
3218 assert!(!buffer.is_dirty());
3219 assert_eq!(*events.lock(), &[language::Event::Saved]);
3220 events.lock().clear();
3221
3222 buffer.edit([(1..1, "B")], None, cx);
3223 buffer.edit([(2..2, "D")], None, cx);
3224 });
3225
3226 // after editing again, the buffer is dirty, and emits another dirty event.
3227 buffer1.update(cx, |buffer, cx| {
3228 assert!(buffer.text() == "aBDc");
3229 assert!(buffer.is_dirty());
3230 assert_eq!(
3231 *events.lock(),
3232 &[
3233 language::Event::Edited,
3234 language::Event::DirtyChanged,
3235 language::Event::Edited,
3236 ],
3237 );
3238 events.lock().clear();
3239
3240 // After restoring the buffer to its previously-saved state,
3241 // the buffer is not considered dirty anymore.
3242 buffer.edit([(1..3, "")], None, cx);
3243 assert!(buffer.text() == "ac");
3244 assert!(!buffer.is_dirty());
3245 });
3246
3247 assert_eq!(
3248 *events.lock(),
3249 &[language::Event::Edited, language::Event::DirtyChanged]
3250 );
3251
3252 // When a file is deleted, the buffer is considered dirty.
3253 let events = Arc::new(Mutex::new(Vec::new()));
3254 let buffer2 = project
3255 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3256 .await
3257 .unwrap();
3258 buffer2.update(cx, |_, cx| {
3259 cx.subscribe(&buffer2, {
3260 let events = events.clone();
3261 move |_, _, event, _| events.lock().push(event.clone())
3262 })
3263 .detach();
3264 });
3265
3266 fs.remove_file("/dir/file2".as_ref(), Default::default())
3267 .await
3268 .unwrap();
3269 cx.executor().run_until_parked();
3270 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3271 assert_eq!(
3272 *events.lock(),
3273 &[
3274 language::Event::DirtyChanged,
3275 language::Event::FileHandleChanged
3276 ]
3277 );
3278
    // When a file that is already dirty is deleted, we don't emit a DirtyChanged event.
3280 let events = Arc::new(Mutex::new(Vec::new()));
3281 let buffer3 = project
3282 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3283 .await
3284 .unwrap();
3285 buffer3.update(cx, |_, cx| {
3286 cx.subscribe(&buffer3, {
3287 let events = events.clone();
3288 move |_, _, event, _| events.lock().push(event.clone())
3289 })
3290 .detach();
3291 });
3292
3293 buffer3.update(cx, |buffer, cx| {
3294 buffer.edit([(0..0, "x")], None, cx);
3295 });
3296 events.lock().clear();
3297 fs.remove_file("/dir/file3".as_ref(), Default::default())
3298 .await
3299 .unwrap();
3300 cx.executor().run_until_parked();
3301 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3302 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3303}
3304
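// Verifies handling of on-disk changes: a clean buffer reloads (with anchors
// adjusted), while a modified buffer keeps its edits and is marked as conflicted.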
3305#[gpui::test]
3306async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3307 init_test(cx);
3308
3309 let initial_contents = "aaa\nbbbbb\nc\n";
3310 let fs = FakeFs::new(cx.executor());
3311 fs.insert_tree(
3312 "/dir",
3313 json!({
3314 "the-file": initial_contents,
3315 }),
3316 )
3317 .await;
3318 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3319 let buffer = project
3320 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3321 .await
3322 .unwrap();
3323
3324 let anchors = (0..3)
3325 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3326 .collect::<Vec<_>>();
3327
3328 // Change the file on disk, adding two new lines of text, and removing
3329 // one line.
3330 buffer.update(cx, |buffer, _| {
3331 assert!(!buffer.is_dirty());
3332 assert!(!buffer.has_conflict());
3333 });
3334 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3335 fs.save(
3336 "/dir/the-file".as_ref(),
3337 &new_contents.into(),
3338 LineEnding::Unix,
3339 )
3340 .await
3341 .unwrap();
3342
3343 // Because the buffer was not modified, it is reloaded from disk. Its
3344 // contents are edited according to the diff between the old and new
3345 // file contents.
3346 cx.executor().run_until_parked();
3347 buffer.update(cx, |buffer, _| {
3348 assert_eq!(buffer.text(), new_contents);
3349 assert!(!buffer.is_dirty());
3350 assert!(!buffer.has_conflict());
3351
3352 let anchor_positions = anchors
3353 .iter()
3354 .map(|anchor| anchor.to_point(&*buffer))
3355 .collect::<Vec<_>>();
3356 assert_eq!(
3357 anchor_positions,
3358 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3359 );
3360 });
3361
3362 // Modify the buffer
3363 buffer.update(cx, |buffer, cx| {
3364 buffer.edit([(0..0, " ")], None, cx);
3365 assert!(buffer.is_dirty());
3366 assert!(!buffer.has_conflict());
3367 });
3368
3369 // Change the file on disk again, adding blank lines to the beginning.
3370 fs.save(
3371 "/dir/the-file".as_ref(),
3372 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3373 LineEnding::Unix,
3374 )
3375 .await
3376 .unwrap();
3377
3378 // Because the buffer is modified, it doesn't reload from disk, but is
3379 // marked as having a conflict.
3380 cx.executor().run_until_parked();
3381 buffer.update(cx, |buffer, _| {
3382 assert!(buffer.has_conflict());
3383 });
3384}
3385
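// Verifies that line endings are detected on load, updated on reload, and
// preserved when saving.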
3386#[gpui::test]
3387async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3388 init_test(cx);
3389
3390 let fs = FakeFs::new(cx.executor());
3391 fs.insert_tree(
3392 "/dir",
3393 json!({
3394 "file1": "a\nb\nc\n",
3395 "file2": "one\r\ntwo\r\nthree\r\n",
3396 }),
3397 )
3398 .await;
3399
3400 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3401 let buffer1 = project
3402 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3403 .await
3404 .unwrap();
3405 let buffer2 = project
3406 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3407 .await
3408 .unwrap();
3409
3410 buffer1.update(cx, |buffer, _| {
3411 assert_eq!(buffer.text(), "a\nb\nc\n");
3412 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3413 });
3414 buffer2.update(cx, |buffer, _| {
3415 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3416 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3417 });
3418
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3421 fs.save(
3422 "/dir/file1".as_ref(),
3423 &"aaa\nb\nc\n".into(),
3424 LineEnding::Windows,
3425 )
3426 .await
3427 .unwrap();
3428 cx.executor().run_until_parked();
3429 buffer1.update(cx, |buffer, _| {
3430 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3431 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3432 });
3433
    // Save a file with Windows line endings. The file is written correctly.
3435 buffer2.update(cx, |buffer, cx| {
3436 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3437 });
3438 project
3439 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3440 .await
3441 .unwrap();
3442 assert_eq!(
3443 fs.load("/dir/file2".as_ref()).await.unwrap(),
3444 "one\r\ntwo\r\nthree\r\nfour\r\n",
3445 );
3446}
3447
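// Verifies that related diagnostics are grouped: primary entries and their
// hints share a group id and can be retrieved per group.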
3448#[gpui::test]
3449async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3450 init_test(cx);
3451
3452 let fs = FakeFs::new(cx.executor());
3453 fs.insert_tree(
3454 "/the-dir",
3455 json!({
3456 "a.rs": "
3457 fn foo(mut v: Vec<usize>) {
3458 for x in &v {
3459 v.push(1);
3460 }
3461 }
3462 "
3463 .unindent(),
3464 }),
3465 )
3466 .await;
3467
3468 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3469 let buffer = project
3470 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3471 .await
3472 .unwrap();
3473
3474 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3475 let message = lsp::PublishDiagnosticsParams {
3476 uri: buffer_uri.clone(),
3477 diagnostics: vec![
3478 lsp::Diagnostic {
3479 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3480 severity: Some(DiagnosticSeverity::WARNING),
3481 message: "error 1".to_string(),
3482 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3483 location: lsp::Location {
3484 uri: buffer_uri.clone(),
3485 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3486 },
3487 message: "error 1 hint 1".to_string(),
3488 }]),
3489 ..Default::default()
3490 },
3491 lsp::Diagnostic {
3492 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3493 severity: Some(DiagnosticSeverity::HINT),
3494 message: "error 1 hint 1".to_string(),
3495 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3496 location: lsp::Location {
3497 uri: buffer_uri.clone(),
3498 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3499 },
3500 message: "original diagnostic".to_string(),
3501 }]),
3502 ..Default::default()
3503 },
3504 lsp::Diagnostic {
3505 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3506 severity: Some(DiagnosticSeverity::ERROR),
3507 message: "error 2".to_string(),
3508 related_information: Some(vec![
3509 lsp::DiagnosticRelatedInformation {
3510 location: lsp::Location {
3511 uri: buffer_uri.clone(),
3512 range: lsp::Range::new(
3513 lsp::Position::new(1, 13),
3514 lsp::Position::new(1, 15),
3515 ),
3516 },
3517 message: "error 2 hint 1".to_string(),
3518 },
3519 lsp::DiagnosticRelatedInformation {
3520 location: lsp::Location {
3521 uri: buffer_uri.clone(),
3522 range: lsp::Range::new(
3523 lsp::Position::new(1, 13),
3524 lsp::Position::new(1, 15),
3525 ),
3526 },
3527 message: "error 2 hint 2".to_string(),
3528 },
3529 ]),
3530 ..Default::default()
3531 },
3532 lsp::Diagnostic {
3533 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3534 severity: Some(DiagnosticSeverity::HINT),
3535 message: "error 2 hint 1".to_string(),
3536 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3537 location: lsp::Location {
3538 uri: buffer_uri.clone(),
3539 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3540 },
3541 message: "original diagnostic".to_string(),
3542 }]),
3543 ..Default::default()
3544 },
3545 lsp::Diagnostic {
3546 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3547 severity: Some(DiagnosticSeverity::HINT),
3548 message: "error 2 hint 2".to_string(),
3549 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3550 location: lsp::Location {
3551 uri: buffer_uri,
3552 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3553 },
3554 message: "original diagnostic".to_string(),
3555 }]),
3556 ..Default::default()
3557 },
3558 ],
3559 version: None,
3560 };
3561
3562 project
3563 .update(cx, |p, cx| {
3564 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3565 })
3566 .unwrap();
3567 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3568
3569 assert_eq!(
3570 buffer
3571 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3572 .collect::<Vec<_>>(),
3573 &[
3574 DiagnosticEntry {
3575 range: Point::new(1, 8)..Point::new(1, 9),
3576 diagnostic: Diagnostic {
3577 severity: DiagnosticSeverity::WARNING,
3578 message: "error 1".to_string(),
3579 group_id: 1,
3580 is_primary: true,
3581 ..Default::default()
3582 }
3583 },
3584 DiagnosticEntry {
3585 range: Point::new(1, 8)..Point::new(1, 9),
3586 diagnostic: Diagnostic {
3587 severity: DiagnosticSeverity::HINT,
3588 message: "error 1 hint 1".to_string(),
3589 group_id: 1,
3590 is_primary: false,
3591 ..Default::default()
3592 }
3593 },
3594 DiagnosticEntry {
3595 range: Point::new(1, 13)..Point::new(1, 15),
3596 diagnostic: Diagnostic {
3597 severity: DiagnosticSeverity::HINT,
3598 message: "error 2 hint 1".to_string(),
3599 group_id: 0,
3600 is_primary: false,
3601 ..Default::default()
3602 }
3603 },
3604 DiagnosticEntry {
3605 range: Point::new(1, 13)..Point::new(1, 15),
3606 diagnostic: Diagnostic {
3607 severity: DiagnosticSeverity::HINT,
3608 message: "error 2 hint 2".to_string(),
3609 group_id: 0,
3610 is_primary: false,
3611 ..Default::default()
3612 }
3613 },
3614 DiagnosticEntry {
3615 range: Point::new(2, 8)..Point::new(2, 17),
3616 diagnostic: Diagnostic {
3617 severity: DiagnosticSeverity::ERROR,
3618 message: "error 2".to_string(),
3619 group_id: 0,
3620 is_primary: true,
3621 ..Default::default()
3622 }
3623 }
3624 ]
3625 );
3626
3627 assert_eq!(
3628 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3629 &[
3630 DiagnosticEntry {
3631 range: Point::new(1, 13)..Point::new(1, 15),
3632 diagnostic: Diagnostic {
3633 severity: DiagnosticSeverity::HINT,
3634 message: "error 2 hint 1".to_string(),
3635 group_id: 0,
3636 is_primary: false,
3637 ..Default::default()
3638 }
3639 },
3640 DiagnosticEntry {
3641 range: Point::new(1, 13)..Point::new(1, 15),
3642 diagnostic: Diagnostic {
3643 severity: DiagnosticSeverity::HINT,
3644 message: "error 2 hint 2".to_string(),
3645 group_id: 0,
3646 is_primary: false,
3647 ..Default::default()
3648 }
3649 },
3650 DiagnosticEntry {
3651 range: Point::new(2, 8)..Point::new(2, 17),
3652 diagnostic: Diagnostic {
3653 severity: DiagnosticSeverity::ERROR,
3654 message: "error 2".to_string(),
3655 group_id: 0,
3656 is_primary: true,
3657 ..Default::default()
3658 }
3659 }
3660 ]
3661 );
3662
3663 assert_eq!(
3664 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3665 &[
3666 DiagnosticEntry {
3667 range: Point::new(1, 8)..Point::new(1, 9),
3668 diagnostic: Diagnostic {
3669 severity: DiagnosticSeverity::WARNING,
3670 message: "error 1".to_string(),
3671 group_id: 1,
3672 is_primary: true,
3673 ..Default::default()
3674 }
3675 },
3676 DiagnosticEntry {
3677 range: Point::new(1, 8)..Point::new(1, 9),
3678 diagnostic: Diagnostic {
3679 severity: DiagnosticSeverity::HINT,
3680 message: "error 1 hint 1".to_string(),
3681 group_id: 1,
3682 is_primary: false,
3683 ..Default::default()
3684 }
3685 },
3686 ]
3687 );
3688}
3689
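// Verifies prepare-rename and perform-rename, applying the resulting workspace
// edit across multiple buffers.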
3690#[gpui::test]
3691async fn test_rename(cx: &mut gpui::TestAppContext) {
3692 init_test(cx);
3693
3694 let mut language = Language::new(
3695 LanguageConfig {
3696 name: "Rust".into(),
3697 matcher: LanguageMatcher {
3698 path_suffixes: vec!["rs".to_string()],
3699 ..Default::default()
3700 },
3701 ..Default::default()
3702 },
3703 Some(tree_sitter_rust::language()),
3704 );
3705 let mut fake_servers = language
3706 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3707 capabilities: lsp::ServerCapabilities {
3708 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3709 prepare_provider: Some(true),
3710 work_done_progress_options: Default::default(),
3711 })),
3712 ..Default::default()
3713 },
3714 ..Default::default()
3715 }))
3716 .await;
3717
3718 let fs = FakeFs::new(cx.executor());
3719 fs.insert_tree(
3720 "/dir",
3721 json!({
3722 "one.rs": "const ONE: usize = 1;",
3723 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3724 }),
3725 )
3726 .await;
3727
3728 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3729 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3730 let buffer = project
3731 .update(cx, |project, cx| {
3732 project.open_local_buffer("/dir/one.rs", cx)
3733 })
3734 .await
3735 .unwrap();
3736
3737 let fake_server = fake_servers.next().await.unwrap();
3738
3739 let response = project.update(cx, |project, cx| {
3740 project.prepare_rename(buffer.clone(), 7, cx)
3741 });
3742 fake_server
3743 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3744 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3745 assert_eq!(params.position, lsp::Position::new(0, 7));
3746 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3747 lsp::Position::new(0, 6),
3748 lsp::Position::new(0, 9),
3749 ))))
3750 })
3751 .next()
3752 .await
3753 .unwrap();
3754 let range = response.await.unwrap().unwrap();
3755 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3756 assert_eq!(range, 6..9);
3757
3758 let response = project.update(cx, |project, cx| {
3759 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3760 });
3761 fake_server
3762 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3763 assert_eq!(
3764 params.text_document_position.text_document.uri.as_str(),
3765 "file:///dir/one.rs"
3766 );
3767 assert_eq!(
3768 params.text_document_position.position,
3769 lsp::Position::new(0, 7)
3770 );
3771 assert_eq!(params.new_name, "THREE");
3772 Ok(Some(lsp::WorkspaceEdit {
3773 changes: Some(
3774 [
3775 (
3776 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3777 vec![lsp::TextEdit::new(
3778 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3779 "THREE".to_string(),
3780 )],
3781 ),
3782 (
3783 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3784 vec![
3785 lsp::TextEdit::new(
3786 lsp::Range::new(
3787 lsp::Position::new(0, 24),
3788 lsp::Position::new(0, 27),
3789 ),
3790 "THREE".to_string(),
3791 ),
3792 lsp::TextEdit::new(
3793 lsp::Range::new(
3794 lsp::Position::new(0, 35),
3795 lsp::Position::new(0, 38),
3796 ),
3797 "THREE".to_string(),
3798 ),
3799 ],
3800 ),
3801 ]
3802 .into_iter()
3803 .collect(),
3804 ),
3805 ..Default::default()
3806 }))
3807 })
3808 .next()
3809 .await
3810 .unwrap();
3811 let mut transaction = response.await.unwrap().0;
3812 assert_eq!(transaction.len(), 2);
3813 assert_eq!(
3814 transaction
3815 .remove_entry(&buffer)
3816 .unwrap()
3817 .0
3818 .update(cx, |buffer, _| buffer.text()),
3819 "const THREE: usize = 1;"
3820 );
3821 assert_eq!(
3822 transaction
3823 .into_keys()
3824 .next()
3825 .unwrap()
3826 .update(cx, |buffer, _| buffer.text()),
3827 "const TWO: usize = one::THREE + one::THREE;"
3828 );
3829}
3830
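// Verifies project search across files on disk as well as unsaved buffer
// contents.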
3831#[gpui::test]
3832async fn test_search(cx: &mut gpui::TestAppContext) {
3833 init_test(cx);
3834
3835 let fs = FakeFs::new(cx.executor());
3836 fs.insert_tree(
3837 "/dir",
3838 json!({
3839 "one.rs": "const ONE: usize = 1;",
3840 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3841 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3842 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3843 }),
3844 )
3845 .await;
3846 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3847 assert_eq!(
3848 search(
3849 &project,
3850 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3851 cx
3852 )
3853 .await
3854 .unwrap(),
3855 HashMap::from_iter([
3856 ("two.rs".to_string(), vec![6..9]),
3857 ("three.rs".to_string(), vec![37..40])
3858 ])
3859 );
3860
3861 let buffer_4 = project
3862 .update(cx, |project, cx| {
3863 project.open_local_buffer("/dir/four.rs", cx)
3864 })
3865 .await
3866 .unwrap();
3867 buffer_4.update(cx, |buffer, cx| {
3868 let text = "two::TWO";
3869 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3870 });
3871
3872 assert_eq!(
3873 search(
3874 &project,
3875 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3876 cx
3877 )
3878 .await
3879 .unwrap(),
3880 HashMap::from_iter([
3881 ("two.rs".to_string(), vec![6..9]),
3882 ("three.rs".to_string(), vec![37..40]),
3883 ("four.rs".to_string(), vec![25..28, 36..39])
3884 ])
3885 );
3886}
3887
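// Verifies that search inclusion patterns restrict results to matching files.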
3888#[gpui::test]
3889async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3890 init_test(cx);
3891
3892 let search_query = "file";
3893
3894 let fs = FakeFs::new(cx.executor());
3895 fs.insert_tree(
3896 "/dir",
3897 json!({
3898 "one.rs": r#"// Rust file one"#,
3899 "one.ts": r#"// TypeScript file one"#,
3900 "two.rs": r#"// Rust file two"#,
3901 "two.ts": r#"// TypeScript file two"#,
3902 }),
3903 )
3904 .await;
3905 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3906
3907 assert!(
3908 search(
3909 &project,
3910 SearchQuery::text(
3911 search_query,
3912 false,
3913 true,
3914 false,
3915 vec![PathMatcher::new("*.odd").unwrap()],
3916 Vec::new()
3917 )
3918 .unwrap(),
3919 cx
3920 )
3921 .await
3922 .unwrap()
3923 .is_empty(),
3924 "If no inclusions match, no files should be returned"
3925 );
3926
3927 assert_eq!(
3928 search(
3929 &project,
3930 SearchQuery::text(
3931 search_query,
3932 false,
3933 true,
3934 false,
3935 vec![PathMatcher::new("*.rs").unwrap()],
3936 Vec::new()
3937 )
3938 .unwrap(),
3939 cx
3940 )
3941 .await
3942 .unwrap(),
3943 HashMap::from_iter([
3944 ("one.rs".to_string(), vec![8..12]),
3945 ("two.rs".to_string(), vec![8..12]),
3946 ]),
3947 "Rust only search should give only Rust files"
3948 );
3949
3950 assert_eq!(
3951 search(
3952 &project,
3953 SearchQuery::text(
3954 search_query,
3955 false,
3956 true,
3957 false,
3958 vec![
3959 PathMatcher::new("*.ts").unwrap(),
3960 PathMatcher::new("*.odd").unwrap(),
3961 ],
3962 Vec::new()
3963 ).unwrap(),
3964 cx
3965 )
3966 .await
3967 .unwrap(),
3968 HashMap::from_iter([
3969 ("one.ts".to_string(), vec![14..18]),
3970 ("two.ts".to_string(), vec![14..18]),
3971 ]),
3972 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3973 );
3974
3975 assert_eq!(
3976 search(
3977 &project,
3978 SearchQuery::text(
3979 search_query,
3980 false,
3981 true,
3982 false,
3983 vec![
3984 PathMatcher::new("*.rs").unwrap(),
3985 PathMatcher::new("*.ts").unwrap(),
3986 PathMatcher::new("*.odd").unwrap(),
3987 ],
3988 Vec::new()
3989 ).unwrap(),
3990 cx
3991 )
3992 .await
3993 .unwrap(),
3994 HashMap::from_iter([
3995 ("one.rs".to_string(), vec![8..12]),
3996 ("one.ts".to_string(), vec![14..18]),
3997 ("two.rs".to_string(), vec![8..12]),
3998 ("two.ts".to_string(), vec![14..18]),
3999 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4001 );
4002}
4003
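// Verifies that search exclusion patterns filter out matching files.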
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("one.ts".to_string(), vec![14..18]),
            ("two.rs".to_string(), vec![8..12]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
    );
}

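// When the same file matches both an inclusion and an exclusion matcher, the exclusion wins.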
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If neither the inclusions nor the exclusions match anything, no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            ).unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If the same TypeScript files match both inclusions and exclusions, exclusions should win and no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Adding non-matching inclusions and exclusions should not change that: exclusions still win"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}

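// Gitignored paths are only searched when the query's include-ignored flag is set.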
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("package.json".to_string(), vec![8..11]),
            ("target/index.txt".to_string(), vec![6..9]),
            (
                "node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
            ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
            ("node_modules/eslint/package.json".to_string(), vec![8..11]),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

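    // Even when ignored files are included, inclusion and exclusion matchers still apply.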
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "A search that includes the ignored prettier directory but excludes TS files should find only one file"
    );
}

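// glob_literal_prefix returns the leading path components of a glob that contain no wildcards or braces.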
#[test]
fn test_glob_literal_prefix() {
    assert_eq!(glob_literal_prefix("**/*.js"), "");
    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
}

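// create_entry should accept ordinary names but reject paths that escape the worktree or contain '..' components.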
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
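    // "b.." merely contains dots in its name; it is a valid entry name, so creation should succeed.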
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}

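/// Runs `query` against `project`, collecting the results into a map from
/// worktree-relative path to the matching offset ranges within each buffer.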
async fn search(
    project: &Model<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut result = HashMap::default();
    while let Some((buffer, ranges)) = search_rx.next().await {
        result.entry(buffer).or_insert(ranges);
    }
    Ok(result
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, _| {
                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}

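/// Shared test setup: initializes logging (when RUST_LOG is set) plus the settings,
/// release channel, language, and project globals used by these tests.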
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init("0.0.0", cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}