1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
48#[gpui::test]
49async fn test_symlinks(cx: &mut gpui::TestAppContext) {
50 init_test(cx);
51 cx.executor().allow_parking();
52
53 let dir = temp_tree(json!({
54 "root": {
55 "apple": "",
56 "banana": {
57 "carrot": {
58 "date": "",
59 "endive": "",
60 }
61 },
62 "fennel": {
63 "grape": "",
64 }
65 }
66 }));
67
68 let root_link_path = dir.path().join("root_link");
69 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
70 os::unix::fs::symlink(
71 &dir.path().join("root/fennel"),
72 &dir.path().join("root/finnochio"),
73 )
74 .unwrap();
75
76 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
77
78 project.update(cx, |project, cx| {
79 let tree = project.worktrees().next().unwrap().read(cx);
80 assert_eq!(tree.file_count(), 5);
81 assert_eq!(
82 tree.inode_for_path("fennel/grape"),
83 tree.inode_for_path("finnochio/grape")
84 );
85 });
86}
87
88#[gpui::test]
89async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
90 init_test(cx);
91
92 let fs = FakeFs::new(cx.executor());
93 fs.insert_tree(
94 "/the-root",
95 json!({
96 ".zed": {
97 "settings.json": r#"{ "tab_size": 8 }"#
98 },
99 "a": {
100 "a.rs": "fn a() {\n A\n}"
101 },
102 "b": {
103 ".zed": {
104 "settings.json": r#"{ "tab_size": 2 }"#
105 },
106 "b.rs": "fn b() {\n B\n}"
107 }
108 }),
109 )
110 .await;
111
112 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
113 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
114
115 cx.executor().run_until_parked();
116 cx.update(|cx| {
117 let tree = worktree.read(cx);
118
119 let settings_a = language_settings(
120 None,
121 Some(
122 &(File::for_entry(
123 tree.entry_for_path("a/a.rs").unwrap().clone(),
124 worktree.clone(),
125 ) as _),
126 ),
127 cx,
128 );
129 let settings_b = language_settings(
130 None,
131 Some(
132 &(File::for_entry(
133 tree.entry_for_path("b/b.rs").unwrap().clone(),
134 worktree.clone(),
135 ) as _),
136 ),
137 cx,
138 );
139
140 assert_eq!(settings_a.tab_size.get(), 8);
141 assert_eq!(settings_b.tab_size.get(), 2);
142 });
143}
144
145#[gpui::test]
146async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
147 init_test(cx);
148
149 let mut rust_language = Language::new(
150 LanguageConfig {
151 name: "Rust".into(),
152 matcher: LanguageMatcher {
153 path_suffixes: vec!["rs".to_string()],
154 ..Default::default()
155 },
156 ..Default::default()
157 },
158 Some(tree_sitter_rust::language()),
159 );
160 let mut json_language = Language::new(
161 LanguageConfig {
162 name: "JSON".into(),
163 matcher: LanguageMatcher {
164 path_suffixes: vec!["json".to_string()],
165 ..Default::default()
166 },
167 ..Default::default()
168 },
169 None,
170 );
171 let mut fake_rust_servers = rust_language
172 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
173 name: "the-rust-language-server",
174 capabilities: lsp::ServerCapabilities {
175 completion_provider: Some(lsp::CompletionOptions {
176 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
177 ..Default::default()
178 }),
179 ..Default::default()
180 },
181 ..Default::default()
182 }))
183 .await;
184 let mut fake_json_servers = json_language
185 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
186 name: "the-json-language-server",
187 capabilities: lsp::ServerCapabilities {
188 completion_provider: Some(lsp::CompletionOptions {
189 trigger_characters: Some(vec![":".to_string()]),
190 ..Default::default()
191 }),
192 ..Default::default()
193 },
194 ..Default::default()
195 }))
196 .await;
197
198 let fs = FakeFs::new(cx.executor());
199 fs.insert_tree(
200 "/the-root",
201 json!({
202 "test.rs": "const A: i32 = 1;",
203 "test2.rs": "",
204 "Cargo.toml": "a = 1",
205 "package.json": "{\"a\": 1}",
206 }),
207 )
208 .await;
209
210 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
211
212 // Open a buffer without an associated language server.
213 let toml_buffer = project
214 .update(cx, |project, cx| {
215 project.open_local_buffer("/the-root/Cargo.toml", cx)
216 })
217 .await
218 .unwrap();
219
220 // Open a buffer with an associated language server before the language for it has been loaded.
221 let rust_buffer = project
222 .update(cx, |project, cx| {
223 project.open_local_buffer("/the-root/test.rs", cx)
224 })
225 .await
226 .unwrap();
227 rust_buffer.update(cx, |buffer, _| {
228 assert_eq!(buffer.language().map(|l| l.name()), None);
229 });
230
231 // Now we add the languages to the project, and ensure they get assigned to all
232 // the relevant open buffers.
233 project.update(cx, |project, _| {
234 project.languages.add(Arc::new(json_language));
235 project.languages.add(Arc::new(rust_language));
236 });
237 cx.executor().run_until_parked();
238 rust_buffer.update(cx, |buffer, _| {
239 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
240 });
241
242 // A server is started up, and it is notified about Rust files.
243 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
244 assert_eq!(
245 fake_rust_server
246 .receive_notification::<lsp::notification::DidOpenTextDocument>()
247 .await
248 .text_document,
249 lsp::TextDocumentItem {
250 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
251 version: 0,
252 text: "const A: i32 = 1;".to_string(),
253 language_id: Default::default()
254 }
255 );
256
257 // The buffer is configured based on the language server's capabilities.
258 rust_buffer.update(cx, |buffer, _| {
259 assert_eq!(
260 buffer.completion_triggers(),
261 &[".".to_string(), "::".to_string()]
262 );
263 });
264 toml_buffer.update(cx, |buffer, _| {
265 assert!(buffer.completion_triggers().is_empty());
266 });
267
268 // Edit a buffer. The changes are reported to the language server.
269 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
270 assert_eq!(
271 fake_rust_server
272 .receive_notification::<lsp::notification::DidChangeTextDocument>()
273 .await
274 .text_document,
275 lsp::VersionedTextDocumentIdentifier::new(
276 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
277 1
278 )
279 );
280
281 // Open a third buffer with a different associated language server.
282 let json_buffer = project
283 .update(cx, |project, cx| {
284 project.open_local_buffer("/the-root/package.json", cx)
285 })
286 .await
287 .unwrap();
288
289 // A json language server is started up and is only notified about the json buffer.
290 let mut fake_json_server = fake_json_servers.next().await.unwrap();
291 assert_eq!(
292 fake_json_server
293 .receive_notification::<lsp::notification::DidOpenTextDocument>()
294 .await
295 .text_document,
296 lsp::TextDocumentItem {
297 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
298 version: 0,
299 text: "{\"a\": 1}".to_string(),
300 language_id: Default::default()
301 }
302 );
303
304 // This buffer is configured based on the second language server's
305 // capabilities.
306 json_buffer.update(cx, |buffer, _| {
307 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
308 });
309
310 // When opening another buffer whose language server is already running,
311 // it is also configured based on the existing language server's capabilities.
312 let rust_buffer2 = project
313 .update(cx, |project, cx| {
314 project.open_local_buffer("/the-root/test2.rs", cx)
315 })
316 .await
317 .unwrap();
318 rust_buffer2.update(cx, |buffer, _| {
319 assert_eq!(
320 buffer.completion_triggers(),
321 &[".".to_string(), "::".to_string()]
322 );
323 });
324
325 // Changes are reported only to servers matching the buffer's language.
326 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
327 rust_buffer2.update(cx, |buffer, cx| {
328 buffer.edit([(0..0, "let x = 1;")], None, cx)
329 });
330 assert_eq!(
331 fake_rust_server
332 .receive_notification::<lsp::notification::DidChangeTextDocument>()
333 .await
334 .text_document,
335 lsp::VersionedTextDocumentIdentifier::new(
336 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
337 1
338 )
339 );
340
341 // Save notifications are reported to all servers.
342 project
343 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
344 .await
345 .unwrap();
346 assert_eq!(
347 fake_rust_server
348 .receive_notification::<lsp::notification::DidSaveTextDocument>()
349 .await
350 .text_document,
351 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
352 );
353 assert_eq!(
354 fake_json_server
355 .receive_notification::<lsp::notification::DidSaveTextDocument>()
356 .await
357 .text_document,
358 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
359 );
360
361 // Renames are reported only to servers matching the buffer's language.
362 fs.rename(
363 Path::new("/the-root/test2.rs"),
364 Path::new("/the-root/test3.rs"),
365 Default::default(),
366 )
367 .await
368 .unwrap();
369 assert_eq!(
370 fake_rust_server
371 .receive_notification::<lsp::notification::DidCloseTextDocument>()
372 .await
373 .text_document,
374 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
375 );
376 assert_eq!(
377 fake_rust_server
378 .receive_notification::<lsp::notification::DidOpenTextDocument>()
379 .await
380 .text_document,
381 lsp::TextDocumentItem {
382 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
383 version: 0,
384 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
385 language_id: Default::default()
386 },
387 );
388
389 rust_buffer2.update(cx, |buffer, cx| {
390 buffer.update_diagnostics(
391 LanguageServerId(0),
392 DiagnosticSet::from_sorted_entries(
393 vec![DiagnosticEntry {
394 diagnostic: Default::default(),
395 range: Anchor::MIN..Anchor::MAX,
396 }],
397 &buffer.snapshot(),
398 ),
399 cx,
400 );
401 assert_eq!(
402 buffer
403 .snapshot()
404 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
405 .count(),
406 1
407 );
408 });
409
410 // When the rename changes the extension of the file, the buffer gets closed on the old
411 // language server and gets opened on the new one.
412 fs.rename(
413 Path::new("/the-root/test3.rs"),
414 Path::new("/the-root/test3.json"),
415 Default::default(),
416 )
417 .await
418 .unwrap();
419 assert_eq!(
420 fake_rust_server
421 .receive_notification::<lsp::notification::DidCloseTextDocument>()
422 .await
423 .text_document,
424 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
425 );
426 assert_eq!(
427 fake_json_server
428 .receive_notification::<lsp::notification::DidOpenTextDocument>()
429 .await
430 .text_document,
431 lsp::TextDocumentItem {
432 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
433 version: 0,
434 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
435 language_id: Default::default()
436 },
437 );
438
439 // We clear the diagnostics, since the language has changed.
440 rust_buffer2.update(cx, |buffer, _| {
441 assert_eq!(
442 buffer
443 .snapshot()
444 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
445 .count(),
446 0
447 );
448 });
449
450 // The renamed file's version resets after changing language server.
451 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
452 assert_eq!(
453 fake_json_server
454 .receive_notification::<lsp::notification::DidChangeTextDocument>()
455 .await
456 .text_document,
457 lsp::VersionedTextDocumentIdentifier::new(
458 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
459 1
460 )
461 );
462
463 // Restart language servers
464 project.update(cx, |project, cx| {
465 project.restart_language_servers_for_buffers(
466 vec![rust_buffer.clone(), json_buffer.clone()],
467 cx,
468 );
469 });
470
471 let mut rust_shutdown_requests = fake_rust_server
472 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
473 let mut json_shutdown_requests = fake_json_server
474 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
475 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
476
477 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
478 let mut fake_json_server = fake_json_servers.next().await.unwrap();
479
480 // Ensure rust document is reopened in new rust language server
481 assert_eq!(
482 fake_rust_server
483 .receive_notification::<lsp::notification::DidOpenTextDocument>()
484 .await
485 .text_document,
486 lsp::TextDocumentItem {
487 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
488 version: 0,
489 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
490 language_id: Default::default()
491 }
492 );
493
494 // Ensure json documents are reopened in new json language server
495 assert_set_eq!(
496 [
497 fake_json_server
498 .receive_notification::<lsp::notification::DidOpenTextDocument>()
499 .await
500 .text_document,
501 fake_json_server
502 .receive_notification::<lsp::notification::DidOpenTextDocument>()
503 .await
504 .text_document,
505 ],
506 [
507 lsp::TextDocumentItem {
508 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
509 version: 0,
510 text: json_buffer.update(cx, |buffer, _| buffer.text()),
511 language_id: Default::default()
512 },
513 lsp::TextDocumentItem {
514 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
515 version: 0,
516 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
517 language_id: Default::default()
518 }
519 ]
520 );
521
522 // Close notifications are reported only to servers matching the buffer's language.
523 cx.update(|_| drop(json_buffer));
524 let close_message = lsp::DidCloseTextDocumentParams {
525 text_document: lsp::TextDocumentIdentifier::new(
526 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
527 ),
528 };
529 assert_eq!(
530 fake_json_server
531 .receive_notification::<lsp::notification::DidCloseTextDocument>()
532 .await,
533 close_message,
534 );
535}
536
537#[gpui::test]
538async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
539 init_test(cx);
540
541 let mut language = Language::new(
542 LanguageConfig {
543 name: "Rust".into(),
544 matcher: LanguageMatcher {
545 path_suffixes: vec!["rs".to_string()],
546 ..Default::default()
547 },
548 ..Default::default()
549 },
550 Some(tree_sitter_rust::language()),
551 );
552 let mut fake_servers = language
553 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
554 name: "the-language-server",
555 ..Default::default()
556 }))
557 .await;
558
559 let fs = FakeFs::new(cx.executor());
560 fs.insert_tree(
561 "/the-root",
562 json!({
563 ".gitignore": "target\n",
564 "src": {
565 "a.rs": "",
566 "b.rs": "",
567 },
568 "target": {
569 "x": {
570 "out": {
571 "x.rs": ""
572 }
573 },
574 "y": {
575 "out": {
576 "y.rs": "",
577 }
578 },
579 "z": {
580 "out": {
581 "z.rs": ""
582 }
583 }
584 }
585 }),
586 )
587 .await;
588
589 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
590 project.update(cx, |project, _| {
591 project.languages.add(Arc::new(language));
592 });
593 cx.executor().run_until_parked();
594
595 // Start the language server by opening a buffer with a compatible file extension.
596 let _buffer = project
597 .update(cx, |project, cx| {
598 project.open_local_buffer("/the-root/src/a.rs", cx)
599 })
600 .await
601 .unwrap();
602
603 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
604 project.update(cx, |project, cx| {
605 let worktree = project.worktrees().next().unwrap();
606 assert_eq!(
607 worktree
608 .read(cx)
609 .snapshot()
610 .entries(true)
611 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
612 .collect::<Vec<_>>(),
613 &[
614 (Path::new(""), false),
615 (Path::new(".gitignore"), false),
616 (Path::new("src"), false),
617 (Path::new("src/a.rs"), false),
618 (Path::new("src/b.rs"), false),
619 (Path::new("target"), true),
620 ]
621 );
622 });
623
624 let prev_read_dir_count = fs.read_dir_call_count();
625
626 // Keep track of the FS events reported to the language server.
627 let fake_server = fake_servers.next().await.unwrap();
628 let file_changes = Arc::new(Mutex::new(Vec::new()));
629 fake_server
630 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
631 registrations: vec![lsp::Registration {
632 id: Default::default(),
633 method: "workspace/didChangeWatchedFiles".to_string(),
634 register_options: serde_json::to_value(
635 lsp::DidChangeWatchedFilesRegistrationOptions {
636 watchers: vec![
637 lsp::FileSystemWatcher {
638 glob_pattern: lsp::GlobPattern::String(
639 "/the-root/Cargo.toml".to_string(),
640 ),
641 kind: None,
642 },
643 lsp::FileSystemWatcher {
644 glob_pattern: lsp::GlobPattern::String(
645 "/the-root/src/*.{rs,c}".to_string(),
646 ),
647 kind: None,
648 },
649 lsp::FileSystemWatcher {
650 glob_pattern: lsp::GlobPattern::String(
651 "/the-root/target/y/**/*.rs".to_string(),
652 ),
653 kind: None,
654 },
655 ],
656 },
657 )
658 .ok(),
659 }],
660 })
661 .await
662 .unwrap();
663 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
664 let file_changes = file_changes.clone();
665 move |params, _| {
666 let mut file_changes = file_changes.lock();
667 file_changes.extend(params.changes);
668 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
669 }
670 });
671
672 cx.executor().run_until_parked();
673 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
674 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
675
676 // Now the language server has asked us to watch an ignored directory path,
677 // so we recursively load it.
678 project.update(cx, |project, cx| {
679 let worktree = project.worktrees().next().unwrap();
680 assert_eq!(
681 worktree
682 .read(cx)
683 .snapshot()
684 .entries(true)
685 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
686 .collect::<Vec<_>>(),
687 &[
688 (Path::new(""), false),
689 (Path::new(".gitignore"), false),
690 (Path::new("src"), false),
691 (Path::new("src/a.rs"), false),
692 (Path::new("src/b.rs"), false),
693 (Path::new("target"), true),
694 (Path::new("target/x"), true),
695 (Path::new("target/y"), true),
696 (Path::new("target/y/out"), true),
697 (Path::new("target/y/out/y.rs"), true),
698 (Path::new("target/z"), true),
699 ]
700 );
701 });
702
703 // Perform some file system mutations, two of which match the watched patterns,
704 // and one of which does not.
705 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
706 .await
707 .unwrap();
708 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
709 .await
710 .unwrap();
711 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
712 .await
713 .unwrap();
714 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
715 .await
716 .unwrap();
717 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
718 .await
719 .unwrap();
720
721 // The language server receives events for the FS mutations that match its watch patterns.
722 cx.executor().run_until_parked();
723 assert_eq!(
724 &*file_changes.lock(),
725 &[
726 lsp::FileEvent {
727 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
728 typ: lsp::FileChangeType::DELETED,
729 },
730 lsp::FileEvent {
731 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
732 typ: lsp::FileChangeType::CREATED,
733 },
734 lsp::FileEvent {
735 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
736 typ: lsp::FileChangeType::CREATED,
737 },
738 ]
739 );
740}
741
742#[gpui::test]
743async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
744 init_test(cx);
745
746 let fs = FakeFs::new(cx.executor());
747 fs.insert_tree(
748 "/dir",
749 json!({
750 "a.rs": "let a = 1;",
751 "b.rs": "let b = 2;"
752 }),
753 )
754 .await;
755
756 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
757
758 let buffer_a = project
759 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
760 .await
761 .unwrap();
762 let buffer_b = project
763 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
764 .await
765 .unwrap();
766
767 project.update(cx, |project, cx| {
768 project
769 .update_diagnostics(
770 LanguageServerId(0),
771 lsp::PublishDiagnosticsParams {
772 uri: Url::from_file_path("/dir/a.rs").unwrap(),
773 version: None,
774 diagnostics: vec![lsp::Diagnostic {
775 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
776 severity: Some(lsp::DiagnosticSeverity::ERROR),
777 message: "error 1".to_string(),
778 ..Default::default()
779 }],
780 },
781 &[],
782 cx,
783 )
784 .unwrap();
785 project
786 .update_diagnostics(
787 LanguageServerId(0),
788 lsp::PublishDiagnosticsParams {
789 uri: Url::from_file_path("/dir/b.rs").unwrap(),
790 version: None,
791 diagnostics: vec![lsp::Diagnostic {
792 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
793 severity: Some(lsp::DiagnosticSeverity::WARNING),
794 message: "error 2".to_string(),
795 ..Default::default()
796 }],
797 },
798 &[],
799 cx,
800 )
801 .unwrap();
802 });
803
804 buffer_a.update(cx, |buffer, _| {
805 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
806 assert_eq!(
807 chunks
808 .iter()
809 .map(|(s, d)| (s.as_str(), *d))
810 .collect::<Vec<_>>(),
811 &[
812 ("let ", None),
813 ("a", Some(DiagnosticSeverity::ERROR)),
814 (" = 1;", None),
815 ]
816 );
817 });
818 buffer_b.update(cx, |buffer, _| {
819 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
820 assert_eq!(
821 chunks
822 .iter()
823 .map(|(s, d)| (s.as_str(), *d))
824 .collect::<Vec<_>>(),
825 &[
826 ("let ", None),
827 ("b", Some(DiagnosticSeverity::WARNING)),
828 (" = 2;", None),
829 ]
830 );
831 });
832}
833
834#[gpui::test]
835async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
836 init_test(cx);
837
838 let fs = FakeFs::new(cx.executor());
839 fs.insert_tree(
840 "/root",
841 json!({
842 "dir": {
843 ".git": {
844 "HEAD": "ref: refs/heads/main",
845 },
846 ".gitignore": "b.rs",
847 "a.rs": "let a = 1;",
848 "b.rs": "let b = 2;",
849 },
850 "other.rs": "let b = c;"
851 }),
852 )
853 .await;
854
855 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
856 let (worktree, _) = project
857 .update(cx, |project, cx| {
858 project.find_or_create_local_worktree("/root/dir", true, cx)
859 })
860 .await
861 .unwrap();
862 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
863
864 let (worktree, _) = project
865 .update(cx, |project, cx| {
866 project.find_or_create_local_worktree("/root/other.rs", false, cx)
867 })
868 .await
869 .unwrap();
870 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
871
872 let server_id = LanguageServerId(0);
873 project.update(cx, |project, cx| {
874 project
875 .update_diagnostics(
876 server_id,
877 lsp::PublishDiagnosticsParams {
878 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
879 version: None,
880 diagnostics: vec![lsp::Diagnostic {
881 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
882 severity: Some(lsp::DiagnosticSeverity::ERROR),
883 message: "unused variable 'b'".to_string(),
884 ..Default::default()
885 }],
886 },
887 &[],
888 cx,
889 )
890 .unwrap();
891 project
892 .update_diagnostics(
893 server_id,
894 lsp::PublishDiagnosticsParams {
895 uri: Url::from_file_path("/root/other.rs").unwrap(),
896 version: None,
897 diagnostics: vec![lsp::Diagnostic {
898 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
899 severity: Some(lsp::DiagnosticSeverity::ERROR),
900 message: "unknown variable 'c'".to_string(),
901 ..Default::default()
902 }],
903 },
904 &[],
905 cx,
906 )
907 .unwrap();
908 });
909
910 let main_ignored_buffer = project
911 .update(cx, |project, cx| {
912 project.open_buffer((main_worktree_id, "b.rs"), cx)
913 })
914 .await
915 .unwrap();
916 main_ignored_buffer.update(cx, |buffer, _| {
917 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
918 assert_eq!(
919 chunks
920 .iter()
921 .map(|(s, d)| (s.as_str(), *d))
922 .collect::<Vec<_>>(),
923 &[
924 ("let ", None),
925 ("b", Some(DiagnosticSeverity::ERROR)),
926 (" = 2;", None),
927 ],
928 "Gigitnored buffers should still get in-buffer diagnostics",
929 );
930 });
931 let other_buffer = project
932 .update(cx, |project, cx| {
933 project.open_buffer((other_worktree_id, ""), cx)
934 })
935 .await
936 .unwrap();
937 other_buffer.update(cx, |buffer, _| {
938 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
939 assert_eq!(
940 chunks
941 .iter()
942 .map(|(s, d)| (s.as_str(), *d))
943 .collect::<Vec<_>>(),
944 &[
945 ("let b = ", None),
946 ("c", Some(DiagnosticSeverity::ERROR)),
947 (";", None),
948 ],
949 "Buffers from hidden projects should still get in-buffer diagnostics"
950 );
951 });
952
953 project.update(cx, |project, cx| {
954 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
955 assert_eq!(
956 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
957 vec![(
958 ProjectPath {
959 worktree_id: main_worktree_id,
960 path: Arc::from(Path::new("b.rs")),
961 },
962 server_id,
963 DiagnosticSummary {
964 error_count: 1,
965 warning_count: 0,
966 }
967 )]
968 );
969 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
970 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
971 });
972}
973
974#[gpui::test]
975async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977
978 let progress_token = "the-progress-token";
979 let mut language = Language::new(
980 LanguageConfig {
981 name: "Rust".into(),
982 matcher: LanguageMatcher {
983 path_suffixes: vec!["rs".to_string()],
984 ..Default::default()
985 },
986 ..Default::default()
987 },
988 Some(tree_sitter_rust::language()),
989 );
990 let mut fake_servers = language
991 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
992 disk_based_diagnostics_progress_token: Some(progress_token.into()),
993 disk_based_diagnostics_sources: vec!["disk".into()],
994 ..Default::default()
995 }))
996 .await;
997
998 let fs = FakeFs::new(cx.executor());
999 fs.insert_tree(
1000 "/dir",
1001 json!({
1002 "a.rs": "fn a() { A }",
1003 "b.rs": "const y: i32 = 1",
1004 }),
1005 )
1006 .await;
1007
1008 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1009 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1010 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1011
1012 // Cause worktree to start the fake language server
1013 let _buffer = project
1014 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1015 .await
1016 .unwrap();
1017
1018 let mut events = cx.events(&project);
1019
1020 let fake_server = fake_servers.next().await.unwrap();
1021 assert_eq!(
1022 events.next().await.unwrap(),
1023 Event::LanguageServerAdded(LanguageServerId(0)),
1024 );
1025
1026 fake_server
1027 .start_progress(format!("{}/0", progress_token))
1028 .await;
1029 assert_eq!(
1030 events.next().await.unwrap(),
1031 Event::DiskBasedDiagnosticsStarted {
1032 language_server_id: LanguageServerId(0),
1033 }
1034 );
1035
1036 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1037 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1038 version: None,
1039 diagnostics: vec![lsp::Diagnostic {
1040 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1041 severity: Some(lsp::DiagnosticSeverity::ERROR),
1042 message: "undefined variable 'A'".to_string(),
1043 ..Default::default()
1044 }],
1045 });
1046 assert_eq!(
1047 events.next().await.unwrap(),
1048 Event::DiagnosticsUpdated {
1049 language_server_id: LanguageServerId(0),
1050 path: (worktree_id, Path::new("a.rs")).into()
1051 }
1052 );
1053
1054 fake_server.end_progress(format!("{}/0", progress_token));
1055 assert_eq!(
1056 events.next().await.unwrap(),
1057 Event::DiskBasedDiagnosticsFinished {
1058 language_server_id: LanguageServerId(0)
1059 }
1060 );
1061
1062 let buffer = project
1063 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1064 .await
1065 .unwrap();
1066
1067 buffer.update(cx, |buffer, _| {
1068 let snapshot = buffer.snapshot();
1069 let diagnostics = snapshot
1070 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1071 .collect::<Vec<_>>();
1072 assert_eq!(
1073 diagnostics,
1074 &[DiagnosticEntry {
1075 range: Point::new(0, 9)..Point::new(0, 10),
1076 diagnostic: Diagnostic {
1077 severity: lsp::DiagnosticSeverity::ERROR,
1078 message: "undefined variable 'A'".to_string(),
1079 group_id: 0,
1080 is_primary: true,
1081 ..Default::default()
1082 }
1083 }]
1084 )
1085 });
1086
1087 // Ensure publishing empty diagnostics twice only results in one update event.
1088 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1089 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1090 version: None,
1091 diagnostics: Default::default(),
1092 });
1093 assert_eq!(
1094 events.next().await.unwrap(),
1095 Event::DiagnosticsUpdated {
1096 language_server_id: LanguageServerId(0),
1097 path: (worktree_id, Path::new("a.rs")).into()
1098 }
1099 );
1100
1101 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1102 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1103 version: None,
1104 diagnostics: Default::default(),
1105 });
1106 cx.executor().run_until_parked();
1107 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1108}
1109
1110#[gpui::test]
1111async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1112 init_test(cx);
1113
1114 let progress_token = "the-progress-token";
1115 let mut language = Language::new(
1116 LanguageConfig {
1117 matcher: LanguageMatcher {
1118 path_suffixes: vec!["rs".to_string()],
1119 ..Default::default()
1120 },
1121 ..Default::default()
1122 },
1123 None,
1124 );
1125 let mut fake_servers = language
1126 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1127 disk_based_diagnostics_sources: vec!["disk".into()],
1128 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1129 ..Default::default()
1130 }))
1131 .await;
1132
1133 let fs = FakeFs::new(cx.executor());
1134 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1135
1136 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1137 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1138
1139 let buffer = project
1140 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1141 .await
1142 .unwrap();
1143
1144 // Simulate diagnostics starting to update.
1145 let fake_server = fake_servers.next().await.unwrap();
1146 fake_server.start_progress(progress_token).await;
1147
1148 // Restart the server before the diagnostics finish updating.
1149 project.update(cx, |project, cx| {
1150 project.restart_language_servers_for_buffers([buffer], cx);
1151 });
1152 let mut events = cx.events(&project);
1153
1154 // Simulate the newly started server sending more diagnostics.
1155 let fake_server = fake_servers.next().await.unwrap();
1156 assert_eq!(
1157 events.next().await.unwrap(),
1158 Event::LanguageServerAdded(LanguageServerId(1))
1159 );
1160 fake_server.start_progress(progress_token).await;
1161 assert_eq!(
1162 events.next().await.unwrap(),
1163 Event::DiskBasedDiagnosticsStarted {
1164 language_server_id: LanguageServerId(1)
1165 }
1166 );
1167 project.update(cx, |project, _| {
1168 assert_eq!(
1169 project
1170 .language_servers_running_disk_based_diagnostics()
1171 .collect::<Vec<_>>(),
1172 [LanguageServerId(1)]
1173 );
1174 });
1175
1176 // All diagnostics are considered done, despite the old server's diagnostic
1177 // task never completing.
1178 fake_server.end_progress(progress_token);
1179 assert_eq!(
1180 events.next().await.unwrap(),
1181 Event::DiskBasedDiagnosticsFinished {
1182 language_server_id: LanguageServerId(1)
1183 }
1184 );
1185 project.update(cx, |project, _| {
1186 assert_eq!(
1187 project
1188 .language_servers_running_disk_based_diagnostics()
1189 .collect::<Vec<_>>(),
1190 [LanguageServerId(0); 0]
1191 );
1192 });
1193}
1194
1195#[gpui::test]
1196async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1197 init_test(cx);
1198
1199 let mut language = Language::new(
1200 LanguageConfig {
1201 matcher: LanguageMatcher {
1202 path_suffixes: vec!["rs".to_string()],
1203 ..Default::default()
1204 },
1205 ..Default::default()
1206 },
1207 None,
1208 );
1209 let mut fake_servers = language
1210 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1211 ..Default::default()
1212 }))
1213 .await;
1214
1215 let fs = FakeFs::new(cx.executor());
1216 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1217
1218 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1219 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1220
1221 let buffer = project
1222 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1223 .await
1224 .unwrap();
1225
1226 // Publish diagnostics
1227 let fake_server = fake_servers.next().await.unwrap();
1228 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1229 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1230 version: None,
1231 diagnostics: vec![lsp::Diagnostic {
1232 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1233 severity: Some(lsp::DiagnosticSeverity::ERROR),
1234 message: "the message".to_string(),
1235 ..Default::default()
1236 }],
1237 });
1238
1239 cx.executor().run_until_parked();
1240 buffer.update(cx, |buffer, _| {
1241 assert_eq!(
1242 buffer
1243 .snapshot()
1244 .diagnostics_in_range::<_, usize>(0..1, false)
1245 .map(|entry| entry.diagnostic.message.clone())
1246 .collect::<Vec<_>>(),
1247 ["the message".to_string()]
1248 );
1249 });
1250 project.update(cx, |project, cx| {
1251 assert_eq!(
1252 project.diagnostic_summary(false, cx),
1253 DiagnosticSummary {
1254 error_count: 1,
1255 warning_count: 0,
1256 }
1257 );
1258 });
1259
1260 project.update(cx, |project, cx| {
1261 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1262 });
1263
1264 // The diagnostics are cleared.
1265 cx.executor().run_until_parked();
1266 buffer.update(cx, |buffer, _| {
1267 assert_eq!(
1268 buffer
1269 .snapshot()
1270 .diagnostics_in_range::<_, usize>(0..1, false)
1271 .map(|entry| entry.diagnostic.message.clone())
1272 .collect::<Vec<_>>(),
1273 Vec::<String>::new(),
1274 );
1275 });
1276 project.update(cx, |project, cx| {
1277 assert_eq!(
1278 project.diagnostic_summary(false, cx),
1279 DiagnosticSummary {
1280 error_count: 0,
1281 warning_count: 0,
1282 }
1283 );
1284 });
1285}
1286
1287#[gpui::test]
1288async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1289 init_test(cx);
1290
1291 let mut language = Language::new(
1292 LanguageConfig {
1293 matcher: LanguageMatcher {
1294 path_suffixes: vec!["rs".to_string()],
1295 ..Default::default()
1296 },
1297 ..Default::default()
1298 },
1299 None,
1300 );
1301 let mut fake_servers = language
1302 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1303 name: "the-lsp",
1304 ..Default::default()
1305 }))
1306 .await;
1307
1308 let fs = FakeFs::new(cx.executor());
1309 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1310
1311 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1312 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1313
1314 let buffer = project
1315 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1316 .await
1317 .unwrap();
1318
1319 // Before restarting the server, report diagnostics with an unknown buffer version.
1320 let fake_server = fake_servers.next().await.unwrap();
1321 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1322 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1323 version: Some(10000),
1324 diagnostics: Vec::new(),
1325 });
1326 cx.executor().run_until_parked();
1327
1328 project.update(cx, |project, cx| {
1329 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1330 });
1331 let mut fake_server = fake_servers.next().await.unwrap();
1332 let notification = fake_server
1333 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1334 .await
1335 .text_document;
1336 assert_eq!(notification.version, 0);
1337}
1338
1339#[gpui::test]
1340async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1341 init_test(cx);
1342
1343 let mut rust = Language::new(
1344 LanguageConfig {
1345 name: Arc::from("Rust"),
1346 matcher: LanguageMatcher {
1347 path_suffixes: vec!["rs".to_string()],
1348 ..Default::default()
1349 },
1350 ..Default::default()
1351 },
1352 None,
1353 );
1354 let mut fake_rust_servers = rust
1355 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1356 name: "rust-lsp",
1357 ..Default::default()
1358 }))
1359 .await;
1360 let mut js = Language::new(
1361 LanguageConfig {
1362 name: Arc::from("JavaScript"),
1363 matcher: LanguageMatcher {
1364 path_suffixes: vec!["js".to_string()],
1365 ..Default::default()
1366 },
1367 ..Default::default()
1368 },
1369 None,
1370 );
1371 let mut fake_js_servers = js
1372 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1373 name: "js-lsp",
1374 ..Default::default()
1375 }))
1376 .await;
1377
1378 let fs = FakeFs::new(cx.executor());
1379 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1380 .await;
1381
1382 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1383 project.update(cx, |project, _| {
1384 project.languages.add(Arc::new(rust));
1385 project.languages.add(Arc::new(js));
1386 });
1387
1388 let _rs_buffer = project
1389 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1390 .await
1391 .unwrap();
1392 let _js_buffer = project
1393 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1394 .await
1395 .unwrap();
1396
1397 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1398 assert_eq!(
1399 fake_rust_server_1
1400 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1401 .await
1402 .text_document
1403 .uri
1404 .as_str(),
1405 "file:///dir/a.rs"
1406 );
1407
1408 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1409 assert_eq!(
1410 fake_js_server
1411 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1412 .await
1413 .text_document
1414 .uri
1415 .as_str(),
1416 "file:///dir/b.js"
1417 );
1418
1419 // Disable Rust language server, ensuring only that server gets stopped.
1420 cx.update(|cx| {
1421 cx.update_global(|settings: &mut SettingsStore, cx| {
1422 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1423 settings.languages.insert(
1424 Arc::from("Rust"),
1425 LanguageSettingsContent {
1426 enable_language_server: Some(false),
1427 ..Default::default()
1428 },
1429 );
1430 });
1431 })
1432 });
1433 fake_rust_server_1
1434 .receive_notification::<lsp::notification::Exit>()
1435 .await;
1436
1437 // Enable Rust and disable JavaScript language servers, ensuring that the
1438 // former gets started again and that the latter stops.
1439 cx.update(|cx| {
1440 cx.update_global(|settings: &mut SettingsStore, cx| {
1441 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1442 settings.languages.insert(
1443 Arc::from("Rust"),
1444 LanguageSettingsContent {
1445 enable_language_server: Some(true),
1446 ..Default::default()
1447 },
1448 );
1449 settings.languages.insert(
1450 Arc::from("JavaScript"),
1451 LanguageSettingsContent {
1452 enable_language_server: Some(false),
1453 ..Default::default()
1454 },
1455 );
1456 });
1457 })
1458 });
1459 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1460 assert_eq!(
1461 fake_rust_server_2
1462 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1463 .await
1464 .text_document
1465 .uri
1466 .as_str(),
1467 "file:///dir/a.rs"
1468 );
1469 fake_js_server
1470 .receive_notification::<lsp::notification::Exit>()
1471 .await;
1472}
1473
1474#[gpui::test(iterations = 3)]
1475async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1476 init_test(cx);
1477
1478 let mut language = Language::new(
1479 LanguageConfig {
1480 name: "Rust".into(),
1481 matcher: LanguageMatcher {
1482 path_suffixes: vec!["rs".to_string()],
1483 ..Default::default()
1484 },
1485 ..Default::default()
1486 },
1487 Some(tree_sitter_rust::language()),
1488 );
1489 let mut fake_servers = language
1490 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1491 disk_based_diagnostics_sources: vec!["disk".into()],
1492 ..Default::default()
1493 }))
1494 .await;
1495
1496 let text = "
1497 fn a() { A }
1498 fn b() { BB }
1499 fn c() { CCC }
1500 "
1501 .unindent();
1502
1503 let fs = FakeFs::new(cx.executor());
1504 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1505
1506 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1507 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1508
1509 let buffer = project
1510 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1511 .await
1512 .unwrap();
1513
1514 let mut fake_server = fake_servers.next().await.unwrap();
1515 let open_notification = fake_server
1516 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1517 .await;
1518
1519 // Edit the buffer, moving the content down
1520 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1521 let change_notification_1 = fake_server
1522 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1523 .await;
1524 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1525
1526 // Report some diagnostics for the initial version of the buffer
1527 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1528 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1529 version: Some(open_notification.text_document.version),
1530 diagnostics: vec![
1531 lsp::Diagnostic {
1532 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1533 severity: Some(DiagnosticSeverity::ERROR),
1534 message: "undefined variable 'A'".to_string(),
1535 source: Some("disk".to_string()),
1536 ..Default::default()
1537 },
1538 lsp::Diagnostic {
1539 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1540 severity: Some(DiagnosticSeverity::ERROR),
1541 message: "undefined variable 'BB'".to_string(),
1542 source: Some("disk".to_string()),
1543 ..Default::default()
1544 },
1545 lsp::Diagnostic {
1546 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1547 severity: Some(DiagnosticSeverity::ERROR),
1548 source: Some("disk".to_string()),
1549 message: "undefined variable 'CCC'".to_string(),
1550 ..Default::default()
1551 },
1552 ],
1553 });
1554
1555 // The diagnostics have moved down since they were created.
1556 cx.executor().run_until_parked();
1557 buffer.update(cx, |buffer, _| {
1558 assert_eq!(
1559 buffer
1560 .snapshot()
1561 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1562 .collect::<Vec<_>>(),
1563 &[
1564 DiagnosticEntry {
1565 range: Point::new(3, 9)..Point::new(3, 11),
1566 diagnostic: Diagnostic {
1567 source: Some("disk".into()),
1568 severity: DiagnosticSeverity::ERROR,
1569 message: "undefined variable 'BB'".to_string(),
1570 is_disk_based: true,
1571 group_id: 1,
1572 is_primary: true,
1573 ..Default::default()
1574 },
1575 },
1576 DiagnosticEntry {
1577 range: Point::new(4, 9)..Point::new(4, 12),
1578 diagnostic: Diagnostic {
1579 source: Some("disk".into()),
1580 severity: DiagnosticSeverity::ERROR,
1581 message: "undefined variable 'CCC'".to_string(),
1582 is_disk_based: true,
1583 group_id: 2,
1584 is_primary: true,
1585 ..Default::default()
1586 }
1587 }
1588 ]
1589 );
1590 assert_eq!(
1591 chunks_with_diagnostics(buffer, 0..buffer.len()),
1592 [
1593 ("\n\nfn a() { ".to_string(), None),
1594 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1595 (" }\nfn b() { ".to_string(), None),
1596 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1597 (" }\nfn c() { ".to_string(), None),
1598 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1599 (" }\n".to_string(), None),
1600 ]
1601 );
1602 assert_eq!(
1603 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1604 [
1605 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1606 (" }\nfn c() { ".to_string(), None),
1607 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1608 ]
1609 );
1610 });
1611
1612 // Ensure overlapping diagnostics are highlighted correctly.
1613 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1614 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1615 version: Some(open_notification.text_document.version),
1616 diagnostics: vec![
1617 lsp::Diagnostic {
1618 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1619 severity: Some(DiagnosticSeverity::ERROR),
1620 message: "undefined variable 'A'".to_string(),
1621 source: Some("disk".to_string()),
1622 ..Default::default()
1623 },
1624 lsp::Diagnostic {
1625 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1626 severity: Some(DiagnosticSeverity::WARNING),
1627 message: "unreachable statement".to_string(),
1628 source: Some("disk".to_string()),
1629 ..Default::default()
1630 },
1631 ],
1632 });
1633
1634 cx.executor().run_until_parked();
1635 buffer.update(cx, |buffer, _| {
1636 assert_eq!(
1637 buffer
1638 .snapshot()
1639 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1640 .collect::<Vec<_>>(),
1641 &[
1642 DiagnosticEntry {
1643 range: Point::new(2, 9)..Point::new(2, 12),
1644 diagnostic: Diagnostic {
1645 source: Some("disk".into()),
1646 severity: DiagnosticSeverity::WARNING,
1647 message: "unreachable statement".to_string(),
1648 is_disk_based: true,
1649 group_id: 4,
1650 is_primary: true,
1651 ..Default::default()
1652 }
1653 },
1654 DiagnosticEntry {
1655 range: Point::new(2, 9)..Point::new(2, 10),
1656 diagnostic: Diagnostic {
1657 source: Some("disk".into()),
1658 severity: DiagnosticSeverity::ERROR,
1659 message: "undefined variable 'A'".to_string(),
1660 is_disk_based: true,
1661 group_id: 3,
1662 is_primary: true,
1663 ..Default::default()
1664 },
1665 }
1666 ]
1667 );
1668 assert_eq!(
1669 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1670 [
1671 ("fn a() { ".to_string(), None),
1672 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1673 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1674 ("\n".to_string(), None),
1675 ]
1676 );
1677 assert_eq!(
1678 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1679 [
1680 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1681 ("\n".to_string(), None),
1682 ]
1683 );
1684 });
1685
1686 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1687 // changes since the last save.
1688 buffer.update(cx, |buffer, cx| {
1689 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1690 buffer.edit(
1691 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1692 None,
1693 cx,
1694 );
1695 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1696 });
1697 let change_notification_2 = fake_server
1698 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1699 .await;
1700 assert!(
1701 change_notification_2.text_document.version > change_notification_1.text_document.version
1702 );
1703
1704 // Handle out-of-order diagnostics
1705 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1706 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1707 version: Some(change_notification_2.text_document.version),
1708 diagnostics: vec![
1709 lsp::Diagnostic {
1710 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1711 severity: Some(DiagnosticSeverity::ERROR),
1712 message: "undefined variable 'BB'".to_string(),
1713 source: Some("disk".to_string()),
1714 ..Default::default()
1715 },
1716 lsp::Diagnostic {
1717 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1718 severity: Some(DiagnosticSeverity::WARNING),
1719 message: "undefined variable 'A'".to_string(),
1720 source: Some("disk".to_string()),
1721 ..Default::default()
1722 },
1723 ],
1724 });
1725
1726 cx.executor().run_until_parked();
1727 buffer.update(cx, |buffer, _| {
1728 assert_eq!(
1729 buffer
1730 .snapshot()
1731 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1732 .collect::<Vec<_>>(),
1733 &[
1734 DiagnosticEntry {
1735 range: Point::new(2, 21)..Point::new(2, 22),
1736 diagnostic: Diagnostic {
1737 source: Some("disk".into()),
1738 severity: DiagnosticSeverity::WARNING,
1739 message: "undefined variable 'A'".to_string(),
1740 is_disk_based: true,
1741 group_id: 6,
1742 is_primary: true,
1743 ..Default::default()
1744 }
1745 },
1746 DiagnosticEntry {
1747 range: Point::new(3, 9)..Point::new(3, 14),
1748 diagnostic: Diagnostic {
1749 source: Some("disk".into()),
1750 severity: DiagnosticSeverity::ERROR,
1751 message: "undefined variable 'BB'".to_string(),
1752 is_disk_based: true,
1753 group_id: 5,
1754 is_primary: true,
1755 ..Default::default()
1756 },
1757 }
1758 ]
1759 );
1760 });
1761}
1762
1763#[gpui::test]
1764async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1765 init_test(cx);
1766
1767 let text = concat!(
1768 "let one = ;\n", //
1769 "let two = \n",
1770 "let three = 3;\n",
1771 );
1772
1773 let fs = FakeFs::new(cx.executor());
1774 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1775
1776 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1777 let buffer = project
1778 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1779 .await
1780 .unwrap();
1781
1782 project.update(cx, |project, cx| {
1783 project
1784 .update_buffer_diagnostics(
1785 &buffer,
1786 LanguageServerId(0),
1787 None,
1788 vec![
1789 DiagnosticEntry {
1790 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1791 diagnostic: Diagnostic {
1792 severity: DiagnosticSeverity::ERROR,
1793 message: "syntax error 1".to_string(),
1794 ..Default::default()
1795 },
1796 },
1797 DiagnosticEntry {
1798 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1799 diagnostic: Diagnostic {
1800 severity: DiagnosticSeverity::ERROR,
1801 message: "syntax error 2".to_string(),
1802 ..Default::default()
1803 },
1804 },
1805 ],
1806 cx,
1807 )
1808 .unwrap();
1809 });
1810
1811 // An empty range is extended forward to include the following character.
1812 // At the end of a line, an empty range is extended backward to include
1813 // the preceding character.
1814 buffer.update(cx, |buffer, _| {
1815 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1816 assert_eq!(
1817 chunks
1818 .iter()
1819 .map(|(s, d)| (s.as_str(), *d))
1820 .collect::<Vec<_>>(),
1821 &[
1822 ("let one = ", None),
1823 (";", Some(DiagnosticSeverity::ERROR)),
1824 ("\nlet two =", None),
1825 (" ", Some(DiagnosticSeverity::ERROR)),
1826 ("\nlet three = 3;\n", None)
1827 ]
1828 );
1829 });
1830}
1831
1832#[gpui::test]
1833async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1834 init_test(cx);
1835
1836 let fs = FakeFs::new(cx.executor());
1837 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1838 .await;
1839
1840 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1841
1842 project.update(cx, |project, cx| {
1843 project
1844 .update_diagnostic_entries(
1845 LanguageServerId(0),
1846 Path::new("/dir/a.rs").to_owned(),
1847 None,
1848 vec![DiagnosticEntry {
1849 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1850 diagnostic: Diagnostic {
1851 severity: DiagnosticSeverity::ERROR,
1852 is_primary: true,
1853 message: "syntax error a1".to_string(),
1854 ..Default::default()
1855 },
1856 }],
1857 cx,
1858 )
1859 .unwrap();
1860 project
1861 .update_diagnostic_entries(
1862 LanguageServerId(1),
1863 Path::new("/dir/a.rs").to_owned(),
1864 None,
1865 vec![DiagnosticEntry {
1866 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1867 diagnostic: Diagnostic {
1868 severity: DiagnosticSeverity::ERROR,
1869 is_primary: true,
1870 message: "syntax error b1".to_string(),
1871 ..Default::default()
1872 },
1873 }],
1874 cx,
1875 )
1876 .unwrap();
1877
1878 assert_eq!(
1879 project.diagnostic_summary(false, cx),
1880 DiagnosticSummary {
1881 error_count: 2,
1882 warning_count: 0,
1883 }
1884 );
1885 });
1886}
1887
1888#[gpui::test]
1889async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1890 init_test(cx);
1891
1892 let mut language = Language::new(
1893 LanguageConfig {
1894 name: "Rust".into(),
1895 matcher: LanguageMatcher {
1896 path_suffixes: vec!["rs".to_string()],
1897 ..Default::default()
1898 },
1899 ..Default::default()
1900 },
1901 Some(tree_sitter_rust::language()),
1902 );
1903 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1904
1905 let text = "
1906 fn a() {
1907 f1();
1908 }
1909 fn b() {
1910 f2();
1911 }
1912 fn c() {
1913 f3();
1914 }
1915 "
1916 .unindent();
1917
1918 let fs = FakeFs::new(cx.executor());
1919 fs.insert_tree(
1920 "/dir",
1921 json!({
1922 "a.rs": text.clone(),
1923 }),
1924 )
1925 .await;
1926
1927 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1928 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1929 let buffer = project
1930 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1931 .await
1932 .unwrap();
1933
1934 let mut fake_server = fake_servers.next().await.unwrap();
1935 let lsp_document_version = fake_server
1936 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1937 .await
1938 .text_document
1939 .version;
1940
1941 // Simulate editing the buffer after the language server computes some edits.
1942 buffer.update(cx, |buffer, cx| {
1943 buffer.edit(
1944 [(
1945 Point::new(0, 0)..Point::new(0, 0),
1946 "// above first function\n",
1947 )],
1948 None,
1949 cx,
1950 );
1951 buffer.edit(
1952 [(
1953 Point::new(2, 0)..Point::new(2, 0),
1954 " // inside first function\n",
1955 )],
1956 None,
1957 cx,
1958 );
1959 buffer.edit(
1960 [(
1961 Point::new(6, 4)..Point::new(6, 4),
1962 "// inside second function ",
1963 )],
1964 None,
1965 cx,
1966 );
1967
1968 assert_eq!(
1969 buffer.text(),
1970 "
1971 // above first function
1972 fn a() {
1973 // inside first function
1974 f1();
1975 }
1976 fn b() {
1977 // inside second function f2();
1978 }
1979 fn c() {
1980 f3();
1981 }
1982 "
1983 .unindent()
1984 );
1985 });
1986
1987 let edits = project
1988 .update(cx, |project, cx| {
1989 project.edits_from_lsp(
1990 &buffer,
1991 vec![
1992 // replace body of first function
1993 lsp::TextEdit {
1994 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1995 new_text: "
1996 fn a() {
1997 f10();
1998 }
1999 "
2000 .unindent(),
2001 },
2002 // edit inside second function
2003 lsp::TextEdit {
2004 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2005 new_text: "00".into(),
2006 },
2007 // edit inside third function via two distinct edits
2008 lsp::TextEdit {
2009 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2010 new_text: "4000".into(),
2011 },
2012 lsp::TextEdit {
2013 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2014 new_text: "".into(),
2015 },
2016 ],
2017 LanguageServerId(0),
2018 Some(lsp_document_version),
2019 cx,
2020 )
2021 })
2022 .await
2023 .unwrap();
2024
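    // Applying the resolved edits should preserve the concurrent local edits
    // while still producing the changes the server intended.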
2025 buffer.update(cx, |buffer, cx| {
2026 for (range, new_text) in edits {
2027 buffer.edit([(range, new_text)], None, cx);
2028 }
2029 assert_eq!(
2030 buffer.text(),
2031 "
2032 // above first function
2033 fn a() {
2034 // inside first function
2035 f10();
2036 }
2037 fn b() {
2038 // inside second function f200();
2039 }
2040 fn c() {
2041 f4000();
2042 }
2043 "
2044 .unindent()
2045 );
2046 });
2047}
2048
2049#[gpui::test]
2050async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2051 init_test(cx);
2052
2053 let text = "
2054 use a::b;
2055 use a::c;
2056
2057 fn f() {
2058 b();
2059 c();
2060 }
2061 "
2062 .unindent();
2063
2064 let fs = FakeFs::new(cx.executor());
2065 fs.insert_tree(
2066 "/dir",
2067 json!({
2068 "a.rs": text.clone(),
2069 }),
2070 )
2071 .await;
2072
2073 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2074 let buffer = project
2075 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2076 .await
2077 .unwrap();
2078
2079 // Simulate the language server sending us a small edit in the form of a very large diff.
2080 // Rust-analyzer does this when performing a merge-imports code action.
2081 let edits = project
2082 .update(cx, |project, cx| {
2083 project.edits_from_lsp(
2084 &buffer,
2085 [
2086 // Replace the first use statement without editing the semicolon.
2087 lsp::TextEdit {
2088 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2089 new_text: "a::{b, c}".into(),
2090 },
2091 // Reinsert the remainder of the file between the semicolon and the final
2092 // newline of the file.
2093 lsp::TextEdit {
2094 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2095 new_text: "\n\n".into(),
2096 },
2097 lsp::TextEdit {
2098 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2099 new_text: "
2100 fn f() {
2101 b();
2102 c();
2103 }"
2104 .unindent(),
2105 },
2106 // Delete everything after the first newline of the file.
2107 lsp::TextEdit {
2108 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2109 new_text: "".into(),
2110 },
2111 ],
2112 LanguageServerId(0),
2113 None,
2114 cx,
2115 )
2116 })
2117 .await
2118 .unwrap();
2119
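    // The server's overlapping edits should be condensed into a minimal set of
    // buffer edits that leaves the unchanged lines untouched.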
2120 buffer.update(cx, |buffer, cx| {
2121 let edits = edits
2122 .into_iter()
2123 .map(|(range, text)| {
2124 (
2125 range.start.to_point(buffer)..range.end.to_point(buffer),
2126 text,
2127 )
2128 })
2129 .collect::<Vec<_>>();
2130
2131 assert_eq!(
2132 edits,
2133 [
2134 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2135 (Point::new(1, 0)..Point::new(2, 0), "".into())
2136 ]
2137 );
2138
2139 for (range, new_text) in edits {
2140 buffer.edit([(range, new_text)], None, cx);
2141 }
2142 assert_eq!(
2143 buffer.text(),
2144 "
2145 use a::{b, c};
2146
2147 fn f() {
2148 b();
2149 c();
2150 }
2151 "
2152 .unindent()
2153 );
2154 });
2155}
2156
2157#[gpui::test]
2158async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2159 init_test(cx);
2160
2161 let text = "
2162 use a::b;
2163 use a::c;
2164
2165 fn f() {
2166 b();
2167 c();
2168 }
2169 "
2170 .unindent();
2171
2172 let fs = FakeFs::new(cx.executor());
2173 fs.insert_tree(
2174 "/dir",
2175 json!({
2176 "a.rs": text.clone(),
2177 }),
2178 )
2179 .await;
2180
2181 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2182 let buffer = project
2183 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2184 .await
2185 .unwrap();
2186
    // Simulate the language server sending us edits out of order, with some
    // ranges inverted or pointing past the end of the buffer.
2189 let edits = project
2190 .update(cx, |project, cx| {
2191 project.edits_from_lsp(
2192 &buffer,
2193 [
2194 lsp::TextEdit {
2195 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2196 new_text: "\n\n".into(),
2197 },
2198 lsp::TextEdit {
2199 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2200 new_text: "a::{b, c}".into(),
2201 },
2202 lsp::TextEdit {
2203 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2204 new_text: "".into(),
2205 },
2206 lsp::TextEdit {
2207 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2208 new_text: "
2209 fn f() {
2210 b();
2211 c();
2212 }"
2213 .unindent(),
2214 },
2215 ],
2216 LanguageServerId(0),
2217 None,
2218 cx,
2219 )
2220 })
2221 .await
2222 .unwrap();
2223
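    // The malformed ranges should be normalized and clipped, producing the same
    // minimal set of edits as in the well-formed case above.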
2224 buffer.update(cx, |buffer, cx| {
2225 let edits = edits
2226 .into_iter()
2227 .map(|(range, text)| {
2228 (
2229 range.start.to_point(buffer)..range.end.to_point(buffer),
2230 text,
2231 )
2232 })
2233 .collect::<Vec<_>>();
2234
2235 assert_eq!(
2236 edits,
2237 [
2238 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2239 (Point::new(1, 0)..Point::new(2, 0), "".into())
2240 ]
2241 );
2242
2243 for (range, new_text) in edits {
2244 buffer.edit([(range, new_text)], None, cx);
2245 }
2246 assert_eq!(
2247 buffer.text(),
2248 "
2249 use a::{b, c};
2250
2251 fn f() {
2252 b();
2253 c();
2254 }
2255 "
2256 .unindent()
2257 );
2258 });
2259}
2260
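// Collects the chunks of `buffer` within `range`, merging adjacent chunks that
// share the same diagnostic severity.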
2261fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2262 buffer: &Buffer,
2263 range: Range<T>,
2264) -> Vec<(String, Option<DiagnosticSeverity>)> {
2265 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2266 for chunk in buffer.snapshot().chunks(range, true) {
2267 if chunks.last().map_or(false, |prev_chunk| {
2268 prev_chunk.1 == chunk.diagnostic_severity
2269 }) {
2270 chunks.last_mut().unwrap().0.push_str(chunk.text);
2271 } else {
2272 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2273 }
2274 }
2275 chunks
2276}
2277
2278#[gpui::test(iterations = 10)]
2279async fn test_definition(cx: &mut gpui::TestAppContext) {
2280 init_test(cx);
2281
2282 let mut language = Language::new(
2283 LanguageConfig {
2284 name: "Rust".into(),
2285 matcher: LanguageMatcher {
2286 path_suffixes: vec!["rs".to_string()],
2287 ..Default::default()
2288 },
2289 ..Default::default()
2290 },
2291 Some(tree_sitter_rust::language()),
2292 );
2293 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2294
2295 let fs = FakeFs::new(cx.executor());
2296 fs.insert_tree(
2297 "/dir",
2298 json!({
2299 "a.rs": "const fn a() { A }",
2300 "b.rs": "const y: i32 = crate::a()",
2301 }),
2302 )
2303 .await;
2304
2305 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2306 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2307
2308 let buffer = project
2309 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2310 .await
2311 .unwrap();
2312
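    // Respond to the definition request with a location in a file that is not
    // part of the project's visible worktree.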
2313 let fake_server = fake_servers.next().await.unwrap();
2314 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2315 let params = params.text_document_position_params;
2316 assert_eq!(
2317 params.text_document.uri.to_file_path().unwrap(),
2318 Path::new("/dir/b.rs"),
2319 );
2320 assert_eq!(params.position, lsp::Position::new(0, 22));
2321
2322 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2323 lsp::Location::new(
2324 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2325 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2326 ),
2327 )))
2328 });
2329
2330 let mut definitions = project
2331 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2332 .await
2333 .unwrap();
2334
    // Assert that going to the definition did not start a new language server.
2336 cx.executor().run_until_parked();
2337 assert!(fake_servers.try_next().is_err());
2338
2339 assert_eq!(definitions.len(), 1);
2340 let definition = definitions.pop().unwrap();
2341 cx.update(|cx| {
2342 let target_buffer = definition.target.buffer.read(cx);
2343 assert_eq!(
2344 target_buffer
2345 .file()
2346 .unwrap()
2347 .as_local()
2348 .unwrap()
2349 .abs_path(cx),
2350 Path::new("/dir/a.rs"),
2351 );
2352 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2353 assert_eq!(
2354 list_worktrees(&project, cx),
2355 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2356 );
2357
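        // Dropping the definition releases the target buffer, which was the
        // only thing keeping the invisible worktree for `a.rs` alive.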
2358 drop(definition);
2359 });
2360 cx.update(|cx| {
2361 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2362 });
2363
2364 fn list_worktrees<'a>(
2365 project: &'a Model<Project>,
2366 cx: &'a AppContext,
2367 ) -> Vec<(&'a Path, bool)> {
2368 project
2369 .read(cx)
2370 .worktrees()
2371 .map(|worktree| {
2372 let worktree = worktree.read(cx);
2373 (
2374 worktree.as_local().unwrap().abs_path().as_ref(),
2375 worktree.is_visible(),
2376 )
2377 })
2378 .collect::<Vec<_>>()
2379 }
2380}
2381
2382#[gpui::test]
2383async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2384 init_test(cx);
2385
2386 let mut language = Language::new(
2387 LanguageConfig {
2388 name: "TypeScript".into(),
2389 matcher: LanguageMatcher {
2390 path_suffixes: vec!["ts".to_string()],
2391 ..Default::default()
2392 },
2393 ..Default::default()
2394 },
2395 Some(tree_sitter_typescript::language_typescript()),
2396 );
2397 let mut fake_language_servers = language
2398 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2399 capabilities: lsp::ServerCapabilities {
2400 completion_provider: Some(lsp::CompletionOptions {
2401 trigger_characters: Some(vec![":".to_string()]),
2402 ..Default::default()
2403 }),
2404 ..Default::default()
2405 },
2406 ..Default::default()
2407 }))
2408 .await;
2409
2410 let fs = FakeFs::new(cx.executor());
2411 fs.insert_tree(
2412 "/dir",
2413 json!({
2414 "a.ts": "",
2415 }),
2416 )
2417 .await;
2418
2419 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2420 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2421 let buffer = project
2422 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2423 .await
2424 .unwrap();
2425
2426 let fake_server = fake_language_servers.next().await.unwrap();
2427
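    // The server provides an insert_text but no edit range, so the completion's
    // old range should fall back to the word ending at the cursor ("fqn").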
2428 let text = "let a = b.fqn";
2429 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2430 let completions = project.update(cx, |project, cx| {
2431 project.completions(&buffer, text.len(), cx)
2432 });
2433
2434 fake_server
2435 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2436 Ok(Some(lsp::CompletionResponse::Array(vec![
2437 lsp::CompletionItem {
2438 label: "fullyQualifiedName?".into(),
2439 insert_text: Some("fullyQualifiedName".into()),
2440 ..Default::default()
2441 },
2442 ])))
2443 })
2444 .next()
2445 .await;
2446 let completions = completions.await.unwrap();
2447 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2448 assert_eq!(completions.len(), 1);
2449 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2450 assert_eq!(
2451 completions[0].old_range.to_offset(&snapshot),
2452 text.len() - 3..text.len()
2453 );
2454
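    // Request completions just before the closing quote. With no edit range from
    // the server, the replaced range should be the partial word "cmp".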
2455 let text = "let a = \"atoms/cmp\"";
2456 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2457 let completions = project.update(cx, |project, cx| {
2458 project.completions(&buffer, text.len() - 1, cx)
2459 });
2460
2461 fake_server
2462 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2463 Ok(Some(lsp::CompletionResponse::Array(vec![
2464 lsp::CompletionItem {
2465 label: "component".into(),
2466 ..Default::default()
2467 },
2468 ])))
2469 })
2470 .next()
2471 .await;
2472 let completions = completions.await.unwrap();
2473 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2474 assert_eq!(completions.len(), 1);
2475 assert_eq!(completions[0].new_text, "component");
2476 assert_eq!(
2477 completions[0].old_range.to_offset(&snapshot),
2478 text.len() - 4..text.len() - 1
2479 );
2480}
2481
2482#[gpui::test]
2483async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2484 init_test(cx);
2485
2486 let mut language = Language::new(
2487 LanguageConfig {
2488 name: "TypeScript".into(),
2489 matcher: LanguageMatcher {
2490 path_suffixes: vec!["ts".to_string()],
2491 ..Default::default()
2492 },
2493 ..Default::default()
2494 },
2495 Some(tree_sitter_typescript::language_typescript()),
2496 );
2497 let mut fake_language_servers = language
2498 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2499 capabilities: lsp::ServerCapabilities {
2500 completion_provider: Some(lsp::CompletionOptions {
2501 trigger_characters: Some(vec![":".to_string()]),
2502 ..Default::default()
2503 }),
2504 ..Default::default()
2505 },
2506 ..Default::default()
2507 }))
2508 .await;
2509
2510 let fs = FakeFs::new(cx.executor());
2511 fs.insert_tree(
2512 "/dir",
2513 json!({
2514 "a.ts": "",
2515 }),
2516 )
2517 .await;
2518
2519 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2520 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2521 let buffer = project
2522 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2523 .await
2524 .unwrap();
2525
2526 let fake_server = fake_language_servers.next().await.unwrap();
2527
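    // The server's completion text contains bare `\r` and `\r\n`; these should
    // be normalized to `\n` in the resulting completion.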
2528 let text = "let a = b.fqn";
2529 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2530 let completions = project.update(cx, |project, cx| {
2531 project.completions(&buffer, text.len(), cx)
2532 });
2533
2534 fake_server
2535 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2536 Ok(Some(lsp::CompletionResponse::Array(vec![
2537 lsp::CompletionItem {
2538 label: "fullyQualifiedName?".into(),
2539 insert_text: Some("fully\rQualified\r\nName".into()),
2540 ..Default::default()
2541 },
2542 ])))
2543 })
2544 .next()
2545 .await;
2546 let completions = completions.await.unwrap();
2547 assert_eq!(completions.len(), 1);
2548 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2549}
2550
2551#[gpui::test(iterations = 10)]
2552async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2553 init_test(cx);
2554
2555 let mut language = Language::new(
2556 LanguageConfig {
2557 name: "TypeScript".into(),
2558 matcher: LanguageMatcher {
2559 path_suffixes: vec!["ts".to_string()],
2560 ..Default::default()
2561 },
2562 ..Default::default()
2563 },
2564 None,
2565 );
2566 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2567
2568 let fs = FakeFs::new(cx.executor());
2569 fs.insert_tree(
2570 "/dir",
2571 json!({
2572 "a.ts": "a",
2573 }),
2574 )
2575 .await;
2576
2577 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2578 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2579 let buffer = project
2580 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2581 .await
2582 .unwrap();
2583
2584 let fake_server = fake_language_servers.next().await.unwrap();
2585
    // The language server returns code actions that contain commands, but no edits.
2587 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2588 fake_server
2589 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2590 Ok(Some(vec![
2591 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2592 title: "The code action".into(),
2593 command: Some(lsp::Command {
2594 title: "The command".into(),
2595 command: "_the/command".into(),
2596 arguments: Some(vec![json!("the-argument")]),
2597 }),
2598 ..Default::default()
2599 }),
2600 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2601 title: "two".into(),
2602 ..Default::default()
2603 }),
2604 ]))
2605 })
2606 .next()
2607 .await;
2608
2609 let action = actions.await.unwrap()[0].clone();
2610 let apply = project.update(cx, |project, cx| {
2611 project.apply_code_action(buffer.clone(), action, true, cx)
2612 });
2613
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2616 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2617 |action, _| async move { Ok(action) },
2618 );
2619
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2622 fake_server
2623 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2624 let fake = fake_server.clone();
2625 move |params, _| {
2626 assert_eq!(params.command, "_the/command");
2627 let fake = fake.clone();
2628 async move {
2629 fake.server
2630 .request::<lsp::request::ApplyWorkspaceEdit>(
2631 lsp::ApplyWorkspaceEditParams {
2632 label: None,
2633 edit: lsp::WorkspaceEdit {
2634 changes: Some(
2635 [(
2636 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2637 vec![lsp::TextEdit {
2638 range: lsp::Range::new(
2639 lsp::Position::new(0, 0),
2640 lsp::Position::new(0, 0),
2641 ),
2642 new_text: "X".into(),
2643 }],
2644 )]
2645 .into_iter()
2646 .collect(),
2647 ),
2648 ..Default::default()
2649 },
2650 },
2651 )
2652 .await
2653 .unwrap();
2654 Ok(Some(json!(null)))
2655 }
2656 }
2657 })
2658 .next()
2659 .await;
2660
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2663 let transaction = apply.await.unwrap();
2664 assert!(transaction.0.contains_key(&buffer));
2665 buffer.update(cx, |buffer, cx| {
2666 assert_eq!(buffer.text(), "Xa");
2667 buffer.undo(cx);
2668 assert_eq!(buffer.text(), "a");
2669 });
2670}
2671
2672#[gpui::test(iterations = 10)]
2673async fn test_save_file(cx: &mut gpui::TestAppContext) {
2674 init_test(cx);
2675
2676 let fs = FakeFs::new(cx.executor());
2677 fs.insert_tree(
2678 "/dir",
2679 json!({
2680 "file1": "the old contents",
2681 }),
2682 )
2683 .await;
2684
2685 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2686 let buffer = project
2687 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2688 .await
2689 .unwrap();
2690 buffer.update(cx, |buffer, cx| {
2691 assert_eq!(buffer.text(), "the old contents");
2692 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2693 });
2694
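    // Saving the buffer should write its edited contents back to disk.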
2695 project
2696 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2697 .await
2698 .unwrap();
2699
2700 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2701 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2702}
2703
2704#[gpui::test(iterations = 30)]
2705async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2706 init_test(cx);
2707
2708 let fs = FakeFs::new(cx.executor().clone());
2709 fs.insert_tree(
2710 "/dir",
2711 json!({
2712 "file1": "the original contents",
2713 }),
2714 )
2715 .await;
2716
2717 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2718 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2719 let buffer = project
2720 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2721 .await
2722 .unwrap();
2723
2724 // Simulate buffer diffs being slow, so that they don't complete before
2725 // the next file change occurs.
2726 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2727
2728 // Change the buffer's file on disk, and then wait for the file change
2729 // to be detected by the worktree, so that the buffer starts reloading.
2730 fs.save(
2731 "/dir/file1".as_ref(),
2732 &"the first contents".into(),
2733 Default::default(),
2734 )
2735 .await
2736 .unwrap();
2737 worktree.next_event(cx);
2738
2739 // Change the buffer's file again. Depending on the random seed, the
2740 // previous file change may still be in progress.
2741 fs.save(
2742 "/dir/file1".as_ref(),
2743 &"the second contents".into(),
2744 Default::default(),
2745 )
2746 .await
2747 .unwrap();
2748 worktree.next_event(cx);
2749
2750 cx.executor().run_until_parked();
2751 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2752 buffer.read_with(cx, |buffer, _| {
2753 assert_eq!(buffer.text(), on_disk_text);
2754 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not be in conflict");
2756 });
2757}
2758
2759#[gpui::test(iterations = 30)]
2760async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2761 init_test(cx);
2762
2763 let fs = FakeFs::new(cx.executor().clone());
2764 fs.insert_tree(
2765 "/dir",
2766 json!({
2767 "file1": "the original contents",
2768 }),
2769 )
2770 .await;
2771
2772 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2773 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2774 let buffer = project
2775 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2776 .await
2777 .unwrap();
2778
2779 // Simulate buffer diffs being slow, so that they don't complete before
2780 // the next file change occurs.
2781 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2782
2783 // Change the buffer's file on disk, and then wait for the file change
2784 // to be detected by the worktree, so that the buffer starts reloading.
2785 fs.save(
2786 "/dir/file1".as_ref(),
2787 &"the first contents".into(),
2788 Default::default(),
2789 )
2790 .await
2791 .unwrap();
2792 worktree.next_event(cx);
2793
2794 cx.executor()
2795 .spawn(cx.executor().simulate_random_delay())
2796 .await;
2797
2798 // Perform a noop edit, causing the buffer's version to increase.
2799 buffer.update(cx, |buffer, cx| {
2800 buffer.edit([(0..0, " ")], None, cx);
2801 buffer.undo(cx);
2802 });
2803
2804 cx.executor().run_until_parked();
2805 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2806 buffer.read_with(cx, |buffer, _| {
2807 let buffer_text = buffer.text();
2808 if buffer_text == on_disk_text {
2809 assert!(
2810 !buffer.is_dirty() && !buffer.has_conflict(),
2811 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2812 );
2813 }
2814 // If the file change occurred while the buffer was processing the first
2815 // change, the buffer will be in a conflicting state.
2816 else {
            assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it is in conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2819 }
2820 });
2821}
2822
2823#[gpui::test]
2824async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2825 init_test(cx);
2826
2827 let fs = FakeFs::new(cx.executor());
2828 fs.insert_tree(
2829 "/dir",
2830 json!({
2831 "file1": "the old contents",
2832 }),
2833 )
2834 .await;
2835
2836 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2837 let buffer = project
2838 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2839 .await
2840 .unwrap();
2841 buffer.update(cx, |buffer, cx| {
2842 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2843 });
2844
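    // Saving through a single-file worktree should write to the file backing the
    // worktree's root entry.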
2845 project
2846 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2847 .await
2848 .unwrap();
2849
2850 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2851 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2852}
2853
2854#[gpui::test]
2855async fn test_save_as(cx: &mut gpui::TestAppContext) {
2856 init_test(cx);
2857
2858 let fs = FakeFs::new(cx.executor());
2859 fs.insert_tree("/dir", json!({})).await;
2860
2861 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2862
2863 let languages = project.update(cx, |project, _| project.languages().clone());
2864 languages.add_grammars([("rust", tree_sitter_rust::language())]);
2865 languages.register(
2866 "/some/path",
2867 LanguageConfig {
2868 name: "Rust".into(),
2869 grammar: Some("rust".into()),
2870 matcher: LanguageMatcher {
2871 path_suffixes: vec!["rs".into()],
2872 ..Default::default()
2873 },
2874 ..Default::default()
2875 },
2876 vec![],
2877 |_| Default::default(),
2878 );
2879
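    // Create an untitled buffer. Before it is saved, it is dirty and uses the
    // Plain Text language.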
2880 let buffer = project.update(cx, |project, cx| {
2881 project.create_buffer("", None, cx).unwrap()
2882 });
2883 buffer.update(cx, |buffer, cx| {
2884 buffer.edit([(0..0, "abc")], None, cx);
2885 assert!(buffer.is_dirty());
2886 assert!(!buffer.has_conflict());
2887 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2888 });
2889 project
2890 .update(cx, |project, cx| {
2891 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2892 })
2893 .await
2894 .unwrap();
2895 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2896
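    // After saving, the buffer should adopt its new path, become clean, and be
    // assigned the Rust language based on its `.rs` extension.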
2897 cx.executor().run_until_parked();
2898 buffer.update(cx, |buffer, cx| {
2899 assert_eq!(
2900 buffer.file().unwrap().full_path(cx),
2901 Path::new("dir/file1.rs")
2902 );
2903 assert!(!buffer.is_dirty());
2904 assert!(!buffer.has_conflict());
2905 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2906 });
2907
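    // Opening the newly saved path should return the same buffer instance.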
2908 let opened_buffer = project
2909 .update(cx, |project, cx| {
2910 project.open_local_buffer("/dir/file1.rs", cx)
2911 })
2912 .await
2913 .unwrap();
2914 assert_eq!(opened_buffer, buffer);
2915}
2916
2917#[gpui::test(retries = 5)]
2918async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2919 init_test(cx);
2920 cx.executor().allow_parking();
2921
2922 let dir = temp_tree(json!({
2923 "a": {
2924 "file1": "",
2925 "file2": "",
2926 "file3": "",
2927 },
2928 "b": {
2929 "c": {
2930 "file4": "",
2931 "file5": "",
2932 }
2933 }
2934 }));
2935
2936 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2937 let rpc = project.update(cx, |p, _| p.client.clone());
2938
2939 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2940 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2941 async move { buffer.await.unwrap() }
2942 };
2943 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2944 project.update(cx, |project, cx| {
2945 let tree = project.worktrees().next().unwrap();
2946 tree.read(cx)
2947 .entry_for_path(path)
2948 .unwrap_or_else(|| panic!("no entry for path {}", path))
2949 .id
2950 })
2951 };
2952
2953 let buffer2 = buffer_for_path("a/file2", cx).await;
2954 let buffer3 = buffer_for_path("a/file3", cx).await;
2955 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2956 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2957
2958 let file2_id = id_for_path("a/file2", cx);
2959 let file3_id = id_for_path("a/file3", cx);
2960 let file4_id = id_for_path("b/c/file4", cx);
2961
2962 // Create a remote copy of this worktree.
2963 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2964
2965 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2966
2967 let updates = Arc::new(Mutex::new(Vec::new()));
2968 tree.update(cx, |tree, cx| {
2969 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2970 let updates = updates.clone();
2971 move |update| {
2972 updates.lock().push(update);
2973 async { true }
2974 }
2975 });
2976 });
2977
2978 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2979
2980 cx.executor().run_until_parked();
2981
2982 cx.update(|cx| {
2983 assert!(!buffer2.read(cx).is_dirty());
2984 assert!(!buffer3.read(cx).is_dirty());
2985 assert!(!buffer4.read(cx).is_dirty());
2986 assert!(!buffer5.read(cx).is_dirty());
2987 });
2988
2989 // Rename and delete files and directories.
2990 tree.flush_fs_events(cx).await;
2991 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2992 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2993 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2994 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2995 tree.flush_fs_events(cx).await;
2996
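    // After rescanning, the local worktree should reflect the renames and
    // deletions performed above.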
2997 let expected_paths = vec![
2998 "a",
2999 "a/file1",
3000 "a/file2.new",
3001 "b",
3002 "d",
3003 "d/file3",
3004 "d/file4",
3005 ];
3006
3007 cx.update(|app| {
3008 assert_eq!(
3009 tree.read(app)
3010 .paths()
3011 .map(|p| p.to_str().unwrap())
3012 .collect::<Vec<_>>(),
3013 expected_paths
3014 );
3015 });
3016
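    // Entry ids are preserved across renames, including for entries inside a
    // renamed directory.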
3017 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
3018 assert_eq!(id_for_path("d/file3", cx), file3_id);
3019 assert_eq!(id_for_path("d/file4", cx), file4_id);
3020
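    // Open buffers should track their files' new paths, and the buffer whose
    // file was deleted should report its file as deleted.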
3021 cx.update(|cx| {
3022 assert_eq!(
3023 buffer2.read(cx).file().unwrap().path().as_ref(),
3024 Path::new("a/file2.new")
3025 );
3026 assert_eq!(
3027 buffer3.read(cx).file().unwrap().path().as_ref(),
3028 Path::new("d/file3")
3029 );
3030 assert_eq!(
3031 buffer4.read(cx).file().unwrap().path().as_ref(),
3032 Path::new("d/file4")
3033 );
3034 assert_eq!(
3035 buffer5.read(cx).file().unwrap().path().as_ref(),
3036 Path::new("b/c/file5")
3037 );
3038
3039 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
3040 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
3041 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
3042 assert!(buffer5.read(cx).file().unwrap().is_deleted());
3043 });
3044
3045 // Update the remote worktree. Check that it becomes consistent with the
3046 // local worktree.
3047 cx.executor().run_until_parked();
3048
3049 remote.update(cx, |remote, _| {
3050 for update in updates.lock().drain(..) {
3051 remote.as_remote_mut().unwrap().update_from_remote(update);
3052 }
3053 });
3054 cx.executor().run_until_parked();
3055 remote.update(cx, |remote, _| {
3056 assert_eq!(
3057 remote
3058 .paths()
3059 .map(|p| p.to_str().unwrap())
3060 .collect::<Vec<_>>(),
3061 expected_paths
3062 );
3063 });
3064}
3065
3066#[gpui::test(iterations = 10)]
3067async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3068 init_test(cx);
3069
3070 let fs = FakeFs::new(cx.executor());
3071 fs.insert_tree(
3072 "/dir",
3073 json!({
3074 "a": {
3075 "file1": "",
3076 }
3077 }),
3078 )
3079 .await;
3080
3081 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3082 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3083 let tree_id = tree.update(cx, |tree, _| tree.id());
3084
3085 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3086 project.update(cx, |project, cx| {
3087 let tree = project.worktrees().next().unwrap();
3088 tree.read(cx)
3089 .entry_for_path(path)
3090 .unwrap_or_else(|| panic!("no entry for path {}", path))
3091 .id
3092 })
3093 };
3094
3095 let dir_id = id_for_path("a", cx);
3096 let file_id = id_for_path("a/file1", cx);
3097 let buffer = project
3098 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3099 .await
3100 .unwrap();
3101 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3102
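    // Rename the parent directory. The directory and file entries should keep
    // their ids, and the open buffer should remain clean.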
3103 project
3104 .update(cx, |project, cx| {
3105 project.rename_entry(dir_id, Path::new("b"), cx)
3106 })
3107 .unwrap()
3108 .await
3109 .unwrap();
3110 cx.executor().run_until_parked();
3111
3112 assert_eq!(id_for_path("b", cx), dir_id);
3113 assert_eq!(id_for_path("b/file1", cx), file_id);
3114 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3115}
3116
3117#[gpui::test]
3118async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3119 init_test(cx);
3120
3121 let fs = FakeFs::new(cx.executor());
3122 fs.insert_tree(
3123 "/dir",
3124 json!({
3125 "a.txt": "a-contents",
3126 "b.txt": "b-contents",
3127 }),
3128 )
3129 .await;
3130
3131 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3132
3133 // Spawn multiple tasks to open paths, repeating some paths.
3134 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3135 (
3136 p.open_local_buffer("/dir/a.txt", cx),
3137 p.open_local_buffer("/dir/b.txt", cx),
3138 p.open_local_buffer("/dir/a.txt", cx),
3139 )
3140 });
3141
3142 let buffer_a_1 = buffer_a_1.await.unwrap();
3143 let buffer_a_2 = buffer_a_2.await.unwrap();
3144 let buffer_b = buffer_b.await.unwrap();
3145 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3146 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3147
3148 // There is only one buffer per path.
3149 let buffer_a_id = buffer_a_1.entity_id();
3150 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3151
3152 // Open the same path again while it is still open.
3153 drop(buffer_a_1);
3154 let buffer_a_3 = project
3155 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3156 .await
3157 .unwrap();
3158
3159 // There's still only one buffer per path.
3160 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3161}
3162
3163#[gpui::test]
3164async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3165 init_test(cx);
3166
3167 let fs = FakeFs::new(cx.executor());
3168 fs.insert_tree(
3169 "/dir",
3170 json!({
3171 "file1": "abc",
3172 "file2": "def",
3173 "file3": "ghi",
3174 }),
3175 )
3176 .await;
3177
3178 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3179
3180 let buffer1 = project
3181 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3182 .await
3183 .unwrap();
3184 let events = Arc::new(Mutex::new(Vec::new()));
3185
3186 // initially, the buffer isn't dirty.
3187 buffer1.update(cx, |buffer, cx| {
3188 cx.subscribe(&buffer1, {
3189 let events = events.clone();
3190 move |_, _, event, _| match event {
3191 BufferEvent::Operation(_) => {}
3192 _ => events.lock().push(event.clone()),
3193 }
3194 })
3195 .detach();
3196
3197 assert!(!buffer.is_dirty());
3198 assert!(events.lock().is_empty());
3199
3200 buffer.edit([(1..2, "")], None, cx);
3201 });
3202
3203 // after the first edit, the buffer is dirty, and emits a dirtied event.
3204 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
3206 assert!(buffer.is_dirty());
3207 assert_eq!(
3208 *events.lock(),
3209 &[language::Event::Edited, language::Event::DirtyChanged]
3210 );
3211 events.lock().clear();
3212 buffer.did_save(
3213 buffer.version(),
3214 buffer.as_rope().fingerprint(),
3215 buffer.file().unwrap().mtime(),
3216 cx,
3217 );
3218 });
3219
3220 // after saving, the buffer is not dirty, and emits a saved event.
3221 buffer1.update(cx, |buffer, cx| {
3222 assert!(!buffer.is_dirty());
3223 assert_eq!(*events.lock(), &[language::Event::Saved]);
3224 events.lock().clear();
3225
3226 buffer.edit([(1..1, "B")], None, cx);
3227 buffer.edit([(2..2, "D")], None, cx);
3228 });
3229
3230 // after editing again, the buffer is dirty, and emits another dirty event.
3231 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
3233 assert!(buffer.is_dirty());
3234 assert_eq!(
3235 *events.lock(),
3236 &[
3237 language::Event::Edited,
3238 language::Event::DirtyChanged,
3239 language::Event::Edited,
3240 ],
3241 );
3242 events.lock().clear();
3243
3244 // After restoring the buffer to its previously-saved state,
3245 // the buffer is not considered dirty anymore.
3246 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
3248 assert!(!buffer.is_dirty());
3249 });
3250
3251 assert_eq!(
3252 *events.lock(),
3253 &[language::Event::Edited, language::Event::DirtyChanged]
3254 );
3255
3256 // When a file is deleted, the buffer is considered dirty.
3257 let events = Arc::new(Mutex::new(Vec::new()));
3258 let buffer2 = project
3259 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3260 .await
3261 .unwrap();
3262 buffer2.update(cx, |_, cx| {
3263 cx.subscribe(&buffer2, {
3264 let events = events.clone();
3265 move |_, _, event, _| events.lock().push(event.clone())
3266 })
3267 .detach();
3268 });
3269
3270 fs.remove_file("/dir/file2".as_ref(), Default::default())
3271 .await
3272 .unwrap();
3273 cx.executor().run_until_parked();
3274 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3275 assert_eq!(
3276 *events.lock(),
3277 &[
3278 language::Event::DirtyChanged,
3279 language::Event::FileHandleChanged
3280 ]
3281 );
3282
    // When a file is already dirty when deleted, we don't emit a DirtyChanged event.
3284 let events = Arc::new(Mutex::new(Vec::new()));
3285 let buffer3 = project
3286 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3287 .await
3288 .unwrap();
3289 buffer3.update(cx, |_, cx| {
3290 cx.subscribe(&buffer3, {
3291 let events = events.clone();
3292 move |_, _, event, _| events.lock().push(event.clone())
3293 })
3294 .detach();
3295 });
3296
3297 buffer3.update(cx, |buffer, cx| {
3298 buffer.edit([(0..0, "x")], None, cx);
3299 });
3300 events.lock().clear();
3301 fs.remove_file("/dir/file3".as_ref(), Default::default())
3302 .await
3303 .unwrap();
3304 cx.executor().run_until_parked();
3305 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3306 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3307}
3308
3309#[gpui::test]
3310async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3311 init_test(cx);
3312
3313 let initial_contents = "aaa\nbbbbb\nc\n";
3314 let fs = FakeFs::new(cx.executor());
3315 fs.insert_tree(
3316 "/dir",
3317 json!({
3318 "the-file": initial_contents,
3319 }),
3320 )
3321 .await;
3322 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3323 let buffer = project
3324 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3325 .await
3326 .unwrap();
3327
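    // Create anchors at column 1 of each line so we can verify that they are
    // relocated sensibly when the file is reloaded from disk.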
3328 let anchors = (0..3)
3329 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3330 .collect::<Vec<_>>();
3331
3332 // Change the file on disk, adding two new lines of text, and removing
3333 // one line.
3334 buffer.update(cx, |buffer, _| {
3335 assert!(!buffer.is_dirty());
3336 assert!(!buffer.has_conflict());
3337 });
3338 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3339 fs.save(
3340 "/dir/the-file".as_ref(),
3341 &new_contents.into(),
3342 LineEnding::Unix,
3343 )
3344 .await
3345 .unwrap();
3346
3347 // Because the buffer was not modified, it is reloaded from disk. Its
3348 // contents are edited according to the diff between the old and new
3349 // file contents.
3350 cx.executor().run_until_parked();
3351 buffer.update(cx, |buffer, _| {
3352 assert_eq!(buffer.text(), new_contents);
3353 assert!(!buffer.is_dirty());
3354 assert!(!buffer.has_conflict());
3355
3356 let anchor_positions = anchors
3357 .iter()
3358 .map(|anchor| anchor.to_point(&*buffer))
3359 .collect::<Vec<_>>();
3360 assert_eq!(
3361 anchor_positions,
3362 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3363 );
3364 });
3365
3366 // Modify the buffer
3367 buffer.update(cx, |buffer, cx| {
3368 buffer.edit([(0..0, " ")], None, cx);
3369 assert!(buffer.is_dirty());
3370 assert!(!buffer.has_conflict());
3371 });
3372
3373 // Change the file on disk again, adding blank lines to the beginning.
3374 fs.save(
3375 "/dir/the-file".as_ref(),
3376 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3377 LineEnding::Unix,
3378 )
3379 .await
3380 .unwrap();
3381
3382 // Because the buffer is modified, it doesn't reload from disk, but is
3383 // marked as having a conflict.
3384 cx.executor().run_until_parked();
3385 buffer.update(cx, |buffer, _| {
3386 assert!(buffer.has_conflict());
3387 });
3388}
3389
3390#[gpui::test]
3391async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3392 init_test(cx);
3393
3394 let fs = FakeFs::new(cx.executor());
3395 fs.insert_tree(
3396 "/dir",
3397 json!({
3398 "file1": "a\nb\nc\n",
3399 "file2": "one\r\ntwo\r\nthree\r\n",
3400 }),
3401 )
3402 .await;
3403
3404 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3405 let buffer1 = project
3406 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3407 .await
3408 .unwrap();
3409 let buffer2 = project
3410 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3411 .await
3412 .unwrap();
3413
3414 buffer1.update(cx, |buffer, _| {
3415 assert_eq!(buffer.text(), "a\nb\nc\n");
3416 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3417 });
3418 buffer2.update(cx, |buffer, _| {
3419 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3420 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3421 });
3422
3423 // Change a file's line endings on disk from unix to windows. The buffer's
3424 // state updates correctly.
3425 fs.save(
3426 "/dir/file1".as_ref(),
3427 &"aaa\nb\nc\n".into(),
3428 LineEnding::Windows,
3429 )
3430 .await
3431 .unwrap();
3432 cx.executor().run_until_parked();
3433 buffer1.update(cx, |buffer, _| {
3434 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3435 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3436 });
3437
3438 // Save a file with windows line endings. The file is written correctly.
3439 buffer2.update(cx, |buffer, cx| {
3440 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3441 });
3442 project
3443 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3444 .await
3445 .unwrap();
3446 assert_eq!(
3447 fs.load("/dir/file2".as_ref()).await.unwrap(),
3448 "one\r\ntwo\r\nthree\r\nfour\r\n",
3449 );
3450}
3451
3452#[gpui::test]
3453async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3454 init_test(cx);
3455
3456 let fs = FakeFs::new(cx.executor());
3457 fs.insert_tree(
3458 "/the-dir",
3459 json!({
3460 "a.rs": "
3461 fn foo(mut v: Vec<usize>) {
3462 for x in &v {
3463 v.push(1);
3464 }
3465 }
3466 "
3467 .unindent(),
3468 }),
3469 )
3470 .await;
3471
3472 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3473 let buffer = project
3474 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3475 .await
3476 .unwrap();
3477
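    // Publish diagnostics in which hints reference their primary diagnostics
    // (and vice versa) via relatedInformation, so they can be grouped.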
3478 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3479 let message = lsp::PublishDiagnosticsParams {
3480 uri: buffer_uri.clone(),
3481 diagnostics: vec![
3482 lsp::Diagnostic {
3483 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3484 severity: Some(DiagnosticSeverity::WARNING),
3485 message: "error 1".to_string(),
3486 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3487 location: lsp::Location {
3488 uri: buffer_uri.clone(),
3489 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3490 },
3491 message: "error 1 hint 1".to_string(),
3492 }]),
3493 ..Default::default()
3494 },
3495 lsp::Diagnostic {
3496 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3497 severity: Some(DiagnosticSeverity::HINT),
3498 message: "error 1 hint 1".to_string(),
3499 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3500 location: lsp::Location {
3501 uri: buffer_uri.clone(),
3502 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3503 },
3504 message: "original diagnostic".to_string(),
3505 }]),
3506 ..Default::default()
3507 },
3508 lsp::Diagnostic {
3509 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3510 severity: Some(DiagnosticSeverity::ERROR),
3511 message: "error 2".to_string(),
3512 related_information: Some(vec![
3513 lsp::DiagnosticRelatedInformation {
3514 location: lsp::Location {
3515 uri: buffer_uri.clone(),
3516 range: lsp::Range::new(
3517 lsp::Position::new(1, 13),
3518 lsp::Position::new(1, 15),
3519 ),
3520 },
3521 message: "error 2 hint 1".to_string(),
3522 },
3523 lsp::DiagnosticRelatedInformation {
3524 location: lsp::Location {
3525 uri: buffer_uri.clone(),
3526 range: lsp::Range::new(
3527 lsp::Position::new(1, 13),
3528 lsp::Position::new(1, 15),
3529 ),
3530 },
3531 message: "error 2 hint 2".to_string(),
3532 },
3533 ]),
3534 ..Default::default()
3535 },
3536 lsp::Diagnostic {
3537 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3538 severity: Some(DiagnosticSeverity::HINT),
3539 message: "error 2 hint 1".to_string(),
3540 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3541 location: lsp::Location {
3542 uri: buffer_uri.clone(),
3543 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3544 },
3545 message: "original diagnostic".to_string(),
3546 }]),
3547 ..Default::default()
3548 },
3549 lsp::Diagnostic {
3550 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3551 severity: Some(DiagnosticSeverity::HINT),
3552 message: "error 2 hint 2".to_string(),
3553 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3554 location: lsp::Location {
3555 uri: buffer_uri,
3556 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3557 },
3558 message: "original diagnostic".to_string(),
3559 }]),
3560 ..Default::default()
3561 },
3562 ],
3563 version: None,
3564 };
3565
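    // Ingest the diagnostics and take a snapshot of the buffer.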
3566 project
3567 .update(cx, |p, cx| {
3568 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3569 })
3570 .unwrap();
3571 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3572
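    // All entries should be grouped: group 0 is "error 2" with its two hints,
    // and group 1 is "error 1" with its single hint.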
3573 assert_eq!(
3574 buffer
3575 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3576 .collect::<Vec<_>>(),
3577 &[
3578 DiagnosticEntry {
3579 range: Point::new(1, 8)..Point::new(1, 9),
3580 diagnostic: Diagnostic {
3581 severity: DiagnosticSeverity::WARNING,
3582 message: "error 1".to_string(),
3583 group_id: 1,
3584 is_primary: true,
3585 ..Default::default()
3586 }
3587 },
3588 DiagnosticEntry {
3589 range: Point::new(1, 8)..Point::new(1, 9),
3590 diagnostic: Diagnostic {
3591 severity: DiagnosticSeverity::HINT,
3592 message: "error 1 hint 1".to_string(),
3593 group_id: 1,
3594 is_primary: false,
3595 ..Default::default()
3596 }
3597 },
3598 DiagnosticEntry {
3599 range: Point::new(1, 13)..Point::new(1, 15),
3600 diagnostic: Diagnostic {
3601 severity: DiagnosticSeverity::HINT,
3602 message: "error 2 hint 1".to_string(),
3603 group_id: 0,
3604 is_primary: false,
3605 ..Default::default()
3606 }
3607 },
3608 DiagnosticEntry {
3609 range: Point::new(1, 13)..Point::new(1, 15),
3610 diagnostic: Diagnostic {
3611 severity: DiagnosticSeverity::HINT,
3612 message: "error 2 hint 2".to_string(),
3613 group_id: 0,
3614 is_primary: false,
3615 ..Default::default()
3616 }
3617 },
3618 DiagnosticEntry {
3619 range: Point::new(2, 8)..Point::new(2, 17),
3620 diagnostic: Diagnostic {
3621 severity: DiagnosticSeverity::ERROR,
3622 message: "error 2".to_string(),
3623 group_id: 0,
3624 is_primary: true,
3625 ..Default::default()
3626 }
3627 }
3628 ]
3629 );
3630
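    // Requesting a single group returns that group's entries in buffer order,
    // with the primary diagnostic included.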
3631 assert_eq!(
3632 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3633 &[
3634 DiagnosticEntry {
3635 range: Point::new(1, 13)..Point::new(1, 15),
3636 diagnostic: Diagnostic {
3637 severity: DiagnosticSeverity::HINT,
3638 message: "error 2 hint 1".to_string(),
3639 group_id: 0,
3640 is_primary: false,
3641 ..Default::default()
3642 }
3643 },
3644 DiagnosticEntry {
3645 range: Point::new(1, 13)..Point::new(1, 15),
3646 diagnostic: Diagnostic {
3647 severity: DiagnosticSeverity::HINT,
3648 message: "error 2 hint 2".to_string(),
3649 group_id: 0,
3650 is_primary: false,
3651 ..Default::default()
3652 }
3653 },
3654 DiagnosticEntry {
3655 range: Point::new(2, 8)..Point::new(2, 17),
3656 diagnostic: Diagnostic {
3657 severity: DiagnosticSeverity::ERROR,
3658 message: "error 2".to_string(),
3659 group_id: 0,
3660 is_primary: true,
3661 ..Default::default()
3662 }
3663 }
3664 ]
3665 );
3666
3667 assert_eq!(
3668 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3669 &[
3670 DiagnosticEntry {
3671 range: Point::new(1, 8)..Point::new(1, 9),
3672 diagnostic: Diagnostic {
3673 severity: DiagnosticSeverity::WARNING,
3674 message: "error 1".to_string(),
3675 group_id: 1,
3676 is_primary: true,
3677 ..Default::default()
3678 }
3679 },
3680 DiagnosticEntry {
3681 range: Point::new(1, 8)..Point::new(1, 9),
3682 diagnostic: Diagnostic {
3683 severity: DiagnosticSeverity::HINT,
3684 message: "error 1 hint 1".to_string(),
3685 group_id: 1,
3686 is_primary: false,
3687 ..Default::default()
3688 }
3689 },
3690 ]
3691 );
3692}
3693
3694#[gpui::test]
3695async fn test_rename(cx: &mut gpui::TestAppContext) {
3696 init_test(cx);
3697
3698 let mut language = Language::new(
3699 LanguageConfig {
3700 name: "Rust".into(),
3701 matcher: LanguageMatcher {
3702 path_suffixes: vec!["rs".to_string()],
3703 ..Default::default()
3704 },
3705 ..Default::default()
3706 },
3707 Some(tree_sitter_rust::language()),
3708 );
3709 let mut fake_servers = language
3710 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3711 capabilities: lsp::ServerCapabilities {
3712 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3713 prepare_provider: Some(true),
3714 work_done_progress_options: Default::default(),
3715 })),
3716 ..Default::default()
3717 },
3718 ..Default::default()
3719 }))
3720 .await;
3721
3722 let fs = FakeFs::new(cx.executor());
3723 fs.insert_tree(
3724 "/dir",
3725 json!({
3726 "one.rs": "const ONE: usize = 1;",
3727 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3728 }),
3729 )
3730 .await;
3731
3732 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3733 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3734 let buffer = project
3735 .update(cx, |project, cx| {
3736 project.open_local_buffer("/dir/one.rs", cx)
3737 })
3738 .await
3739 .unwrap();
3740
3741 let fake_server = fake_servers.next().await.unwrap();
3742
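    // Preparing the rename should surface the range of the symbol under the
    // cursor, as reported by the server.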
3743 let response = project.update(cx, |project, cx| {
3744 project.prepare_rename(buffer.clone(), 7, cx)
3745 });
3746 fake_server
3747 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3748 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3749 assert_eq!(params.position, lsp::Position::new(0, 7));
3750 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3751 lsp::Position::new(0, 6),
3752 lsp::Position::new(0, 9),
3753 ))))
3754 })
3755 .next()
3756 .await
3757 .unwrap();
3758 let range = response.await.unwrap().unwrap();
3759 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3760 assert_eq!(range, 6..9);
3761
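    // Performing the rename should apply the server's workspace edit to both
    // files, returning a transaction that covers each affected buffer.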
3762 let response = project.update(cx, |project, cx| {
3763 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3764 });
3765 fake_server
3766 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3767 assert_eq!(
3768 params.text_document_position.text_document.uri.as_str(),
3769 "file:///dir/one.rs"
3770 );
3771 assert_eq!(
3772 params.text_document_position.position,
3773 lsp::Position::new(0, 7)
3774 );
3775 assert_eq!(params.new_name, "THREE");
3776 Ok(Some(lsp::WorkspaceEdit {
3777 changes: Some(
3778 [
3779 (
3780 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3781 vec![lsp::TextEdit::new(
3782 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3783 "THREE".to_string(),
3784 )],
3785 ),
3786 (
3787 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3788 vec![
3789 lsp::TextEdit::new(
3790 lsp::Range::new(
3791 lsp::Position::new(0, 24),
3792 lsp::Position::new(0, 27),
3793 ),
3794 "THREE".to_string(),
3795 ),
3796 lsp::TextEdit::new(
3797 lsp::Range::new(
3798 lsp::Position::new(0, 35),
3799 lsp::Position::new(0, 38),
3800 ),
3801 "THREE".to_string(),
3802 ),
3803 ],
3804 ),
3805 ]
3806 .into_iter()
3807 .collect(),
3808 ),
3809 ..Default::default()
3810 }))
3811 })
3812 .next()
3813 .await
3814 .unwrap();
3815 let mut transaction = response.await.unwrap().0;
3816 assert_eq!(transaction.len(), 2);
3817 assert_eq!(
3818 transaction
3819 .remove_entry(&buffer)
3820 .unwrap()
3821 .0
3822 .update(cx, |buffer, _| buffer.text()),
3823 "const THREE: usize = 1;"
3824 );
3825 assert_eq!(
3826 transaction
3827 .into_keys()
3828 .next()
3829 .unwrap()
3830 .update(cx, |buffer, _| buffer.text()),
3831 "const TWO: usize = one::THREE + one::THREE;"
3832 );
3833}
3834
3835#[gpui::test]
3836async fn test_search(cx: &mut gpui::TestAppContext) {
3837 init_test(cx);
3838
3839 let fs = FakeFs::new(cx.executor());
3840 fs.insert_tree(
3841 "/dir",
3842 json!({
3843 "one.rs": "const ONE: usize = 1;",
3844 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3845 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3846 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3847 }),
3848 )
3849 .await;
3850 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3851 assert_eq!(
3852 search(
3853 &project,
3854 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3855 cx
3856 )
3857 .await
3858 .unwrap(),
3859 HashMap::from_iter([
3860 ("two.rs".to_string(), vec![6..9]),
3861 ("three.rs".to_string(), vec![37..40])
3862 ])
3863 );
3864
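    // Edit an open buffer so that it now contains matches. The search should
    // reflect the unsaved contents of open buffers.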
3865 let buffer_4 = project
3866 .update(cx, |project, cx| {
3867 project.open_local_buffer("/dir/four.rs", cx)
3868 })
3869 .await
3870 .unwrap();
3871 buffer_4.update(cx, |buffer, cx| {
3872 let text = "two::TWO";
3873 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3874 });
3875
3876 assert_eq!(
3877 search(
3878 &project,
3879 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3880 cx
3881 )
3882 .await
3883 .unwrap(),
3884 HashMap::from_iter([
3885 ("two.rs".to_string(), vec![6..9]),
3886 ("three.rs".to_string(), vec![37..40]),
3887 ("four.rs".to_string(), vec![25..28, 36..39])
3888 ])
3889 );
3890}
3891
3892#[gpui::test]
3893async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3894 init_test(cx);
3895
3896 let search_query = "file";
3897
3898 let fs = FakeFs::new(cx.executor());
3899 fs.insert_tree(
3900 "/dir",
3901 json!({
3902 "one.rs": r#"// Rust file one"#,
3903 "one.ts": r#"// TypeScript file one"#,
3904 "two.rs": r#"// Rust file two"#,
3905 "two.ts": r#"// TypeScript file two"#,
3906 }),
3907 )
3908 .await;
3909 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3910
3911 assert!(
3912 search(
3913 &project,
3914 SearchQuery::text(
3915 search_query,
3916 false,
3917 true,
3918 false,
3919 vec![PathMatcher::new("*.odd").unwrap()],
3920 Vec::new()
3921 )
3922 .unwrap(),
3923 cx
3924 )
3925 .await
3926 .unwrap()
3927 .is_empty(),
3928 "If no inclusions match, no files should be returned"
3929 );
3930
3931 assert_eq!(
3932 search(
3933 &project,
3934 SearchQuery::text(
3935 search_query,
3936 false,
3937 true,
3938 false,
3939 vec![PathMatcher::new("*.rs").unwrap()],
3940 Vec::new()
3941 )
3942 .unwrap(),
3943 cx
3944 )
3945 .await
3946 .unwrap(),
3947 HashMap::from_iter([
3948 ("one.rs".to_string(), vec![8..12]),
3949 ("two.rs".to_string(), vec![8..12]),
3950 ]),
3951 "Rust only search should give only Rust files"
3952 );
3953
3954 assert_eq!(
3955 search(
3956 &project,
3957 SearchQuery::text(
3958 search_query,
3959 false,
3960 true,
3961 false,
3962 vec![
3963 PathMatcher::new("*.ts").unwrap(),
3964 PathMatcher::new("*.odd").unwrap(),
3965 ],
3966 Vec::new()
3967 ).unwrap(),
3968 cx
3969 )
3970 .await
3971 .unwrap(),
3972 HashMap::from_iter([
3973 ("one.ts".to_string(), vec![14..18]),
3974 ("two.ts".to_string(), vec![14..18]),
3975 ]),
3976 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3977 );
3978
3979 assert_eq!(
3980 search(
3981 &project,
3982 SearchQuery::text(
3983 search_query,
3984 false,
3985 true,
3986 false,
3987 vec![
3988 PathMatcher::new("*.rs").unwrap(),
3989 PathMatcher::new("*.ts").unwrap(),
3990 PathMatcher::new("*.odd").unwrap(),
3991 ],
3992 Vec::new()
3993 ).unwrap(),
3994 cx
3995 )
3996 .await
3997 .unwrap(),
3998 HashMap::from_iter([
3999 ("one.rs".to_string(), vec![8..12]),
4000 ("one.ts".to_string(), vec![14..18]),
4001 ("two.rs".to_string(), vec![8..12]),
4002 ("two.ts".to_string(), vec![14..18]),
4003 ]),
4004 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4005 );
4006}
4007
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("one.ts".to_string(), vec![14..18]),
            ("two.rs".to_string(), vec![8..12]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
    );
}

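// When the same files match both an inclusion and an exclusion pattern, the exclusion is
// expected to take precedence.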
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If inclusions and exclusions both match nothing, no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If the same TypeScript files match both inclusions and exclusions, exclusions should win and no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Adding non-matching patterns to both lists should not change that: exclusions still win and no files are returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}

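// Files inside .gitignore'd directories are skipped by default; the fourth argument to
// SearchQuery::text (assumed here to be an include-ignored flag) is flipped to true to
// opt them back in.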
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("package.json".to_string(), vec![8..11]),
            ("target/index.txt".to_string(), vec![6..9]),
            (
                "node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
            ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
            ("node_modules/eslint/package.json".to_string(), vec![8..11]),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}

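// glob_literal_prefix is expected to return the leading portion of a glob pattern that
// contains no glob metacharacters; a pattern with no wildcards comes back unchanged.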
#[test]
fn test_glob_literal_prefix() {
    assert_eq!(glob_literal_prefix("**/*.js"), "");
    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
}

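// Entry creation must stay inside the worktree: relative paths that escape it or that
// contain '..' components are rejected, and such paths cannot be opened as buffers either.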
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
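    // "b.." is an ordinary file name here (the dots are part of the name, not a
    // parent-directory component), so creating this entry should succeed.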
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err());
}

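// Test helper: drains all results for `query` from Project::search into a map from
// worktree-relative path to the matching offset ranges within that file.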
async fn search(
    project: &Model<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut result = HashMap::default();
    while let Some((buffer, ranges)) = search_rx.next().await {
        result.entry(buffer).or_insert(ranges);
    }
    Ok(result
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, _| {
                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}

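// Shared setup for the tests above: installs a test SettingsStore and initializes the
// language and project settings globals.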
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init("0.0.0", cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}