1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20use worktree::WorktreeModelHandle as _;
21
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/Users").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees().next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Worktree with a root-level `.zed` directory (settings + tasks) and a
    // subdirectory `b` that carries its own `.zed` overrides.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let task_context = TaskContext::default();

    // Let the settings/tasks files be scanned before querying them.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        })
    });
    // Source kind identifying the root-level tasks.json.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: `a/a.rs` sees the root settings,
            // `b/b.rs` sees the nested `.zed` override.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks are discovered, each attributed to its own tasks.json.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Record the root task as scheduled so the inventory tracks it.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Serialize an updated template for the root task (extra arg + env var).
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    // Replace the root static task source with a channel-fed one, then push
    // the updated template through the channel.
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The root task now reflects the updated args/env; the nested `b` task is
    // unchanged.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
297
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are routed to the server for
// their language, renames can move a buffer between servers, and restarts
// reopen all relevant documents. The notification order asserted below is
// part of the contract being tested — do not reorder the awaits.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust and JSON servers with distinct completion trigger characters,
    // so we can tell which server configured a given buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic to the buffer so we can later verify it is cleared
    // when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the new ones start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence assert_set_eq).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
668
// Verifies the `workspace/didChangeWatchedFiles` flow: the worktree initially
// skips loading gitignored directories, but once a language server registers a
// watcher inside one, that subtree is loaded and matching FS events are
// forwarded to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob over `src`, and a glob
    // inside the gitignored `target/y` subtree.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no change events.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
862
863#[gpui::test]
864async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
865 init_test(cx);
866
867 let fs = FakeFs::new(cx.executor());
868 fs.insert_tree(
869 "/dir",
870 json!({
871 "a.rs": "let a = 1;",
872 "b.rs": "let b = 2;"
873 }),
874 )
875 .await;
876
877 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
878
879 let buffer_a = project
880 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
881 .await
882 .unwrap();
883 let buffer_b = project
884 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
885 .await
886 .unwrap();
887
888 project.update(cx, |project, cx| {
889 project
890 .update_diagnostics(
891 LanguageServerId(0),
892 lsp::PublishDiagnosticsParams {
893 uri: Url::from_file_path("/dir/a.rs").unwrap(),
894 version: None,
895 diagnostics: vec![lsp::Diagnostic {
896 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
897 severity: Some(lsp::DiagnosticSeverity::ERROR),
898 message: "error 1".to_string(),
899 ..Default::default()
900 }],
901 },
902 &[],
903 cx,
904 )
905 .unwrap();
906 project
907 .update_diagnostics(
908 LanguageServerId(0),
909 lsp::PublishDiagnosticsParams {
910 uri: Url::from_file_path("/dir/b.rs").unwrap(),
911 version: None,
912 diagnostics: vec![lsp::Diagnostic {
913 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
914 severity: Some(lsp::DiagnosticSeverity::WARNING),
915 message: "error 2".to_string(),
916 ..Default::default()
917 }],
918 },
919 &[],
920 cx,
921 )
922 .unwrap();
923 });
924
925 buffer_a.update(cx, |buffer, _| {
926 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
927 assert_eq!(
928 chunks
929 .iter()
930 .map(|(s, d)| (s.as_str(), *d))
931 .collect::<Vec<_>>(),
932 &[
933 ("let ", None),
934 ("a", Some(DiagnosticSeverity::ERROR)),
935 (" = 1;", None),
936 ]
937 );
938 });
939 buffer_b.update(cx, |buffer, _| {
940 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
941 assert_eq!(
942 chunks
943 .iter()
944 .map(|(s, d)| (s.as_str(), *d))
945 .collect::<Vec<_>>(),
946 &[
947 ("let ", None),
948 ("b", Some(DiagnosticSeverity::WARNING)),
949 (" = 2;", None),
950 ]
951 );
952 });
953}
954
955#[gpui::test]
956async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
957 init_test(cx);
958
959 let fs = FakeFs::new(cx.executor());
960 fs.insert_tree(
961 "/root",
962 json!({
963 "dir": {
964 ".git": {
965 "HEAD": "ref: refs/heads/main",
966 },
967 ".gitignore": "b.rs",
968 "a.rs": "let a = 1;",
969 "b.rs": "let b = 2;",
970 },
971 "other.rs": "let b = c;"
972 }),
973 )
974 .await;
975
976 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
977 let (worktree, _) = project
978 .update(cx, |project, cx| {
979 project.find_or_create_local_worktree("/root/dir", true, cx)
980 })
981 .await
982 .unwrap();
983 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
984
985 let (worktree, _) = project
986 .update(cx, |project, cx| {
987 project.find_or_create_local_worktree("/root/other.rs", false, cx)
988 })
989 .await
990 .unwrap();
991 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
992
993 let server_id = LanguageServerId(0);
994 project.update(cx, |project, cx| {
995 project
996 .update_diagnostics(
997 server_id,
998 lsp::PublishDiagnosticsParams {
999 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1000 version: None,
1001 diagnostics: vec![lsp::Diagnostic {
1002 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1003 severity: Some(lsp::DiagnosticSeverity::ERROR),
1004 message: "unused variable 'b'".to_string(),
1005 ..Default::default()
1006 }],
1007 },
1008 &[],
1009 cx,
1010 )
1011 .unwrap();
1012 project
1013 .update_diagnostics(
1014 server_id,
1015 lsp::PublishDiagnosticsParams {
1016 uri: Url::from_file_path("/root/other.rs").unwrap(),
1017 version: None,
1018 diagnostics: vec![lsp::Diagnostic {
1019 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1020 severity: Some(lsp::DiagnosticSeverity::ERROR),
1021 message: "unknown variable 'c'".to_string(),
1022 ..Default::default()
1023 }],
1024 },
1025 &[],
1026 cx,
1027 )
1028 .unwrap();
1029 });
1030
1031 let main_ignored_buffer = project
1032 .update(cx, |project, cx| {
1033 project.open_buffer((main_worktree_id, "b.rs"), cx)
1034 })
1035 .await
1036 .unwrap();
1037 main_ignored_buffer.update(cx, |buffer, _| {
1038 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1039 assert_eq!(
1040 chunks
1041 .iter()
1042 .map(|(s, d)| (s.as_str(), *d))
1043 .collect::<Vec<_>>(),
1044 &[
1045 ("let ", None),
1046 ("b", Some(DiagnosticSeverity::ERROR)),
1047 (" = 2;", None),
1048 ],
1049 "Gigitnored buffers should still get in-buffer diagnostics",
1050 );
1051 });
1052 let other_buffer = project
1053 .update(cx, |project, cx| {
1054 project.open_buffer((other_worktree_id, ""), cx)
1055 })
1056 .await
1057 .unwrap();
1058 other_buffer.update(cx, |buffer, _| {
1059 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1060 assert_eq!(
1061 chunks
1062 .iter()
1063 .map(|(s, d)| (s.as_str(), *d))
1064 .collect::<Vec<_>>(),
1065 &[
1066 ("let b = ", None),
1067 ("c", Some(DiagnosticSeverity::ERROR)),
1068 (";", None),
1069 ],
1070 "Buffers from hidden projects should still get in-buffer diagnostics"
1071 );
1072 });
1073
1074 project.update(cx, |project, cx| {
1075 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1076 assert_eq!(
1077 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1078 vec![(
1079 ProjectPath {
1080 worktree_id: main_worktree_id,
1081 path: Arc::from(Path::new("b.rs")),
1082 },
1083 server_id,
1084 DiagnosticSummary {
1085 error_count: 1,
1086 warning_count: 0,
1087 }
1088 )]
1089 );
1090 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1091 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1092 });
1093}
1094
// Verifies that a language server's disk-based diagnostics progress token is
// translated into project events: `DiskBasedDiagnosticsStarted` when progress
// for the token begins, `DiagnosticsUpdated` when diagnostics are published,
// and `DiskBasedDiagnosticsFinished` when the progress ends. Also verifies
// that re-publishing an identical empty diagnostic set does not emit a
// redundant update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake server whose progress token marks disk-based diagnostics.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Starting progress under the registered token signals the beginning of a
    // disk-based diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The diagnostic published above should now be visible on the buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event: the second identical empty publish was a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1223
// Restart a language server while its disk-based diagnostics pass is still in
// flight. The replacement server (id 1) should drive fresh Started/Finished
// events, and the old server's never-completed progress must not leave the
// project reporting an in-flight diagnostics pass.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server should be reported as running diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1302
// Restarting a language server must clear the diagnostics it had published:
// both the per-buffer diagnostic entries and the project-wide summary should
// be empty after the restart.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the publish propagate, then confirm the diagnostic is recorded on
    // the buffer and counted in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1382
// A server may publish diagnostics tagged with a document version the client
// never sent (here: 10000). This must not poison version tracking: after a
// restart, the buffer is re-opened with the server at version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The replacement server should see the document opened at version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1421
// Toggling the per-language `enable_language_server` setting must stop and
// start only the server for that language: disabling Rust exits only the
// Rust server, and re-enabling it starts a fresh instance that re-opens the
// Rust buffer, while the JavaScript server is affected only by its own flag.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the Rust buffer from scratch.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1535
// Diagnostics published against an out-of-date buffer version must be
// translated through the edits made since that version so they land on the
// text they originally described. Also covers overlapping diagnostics
// (error inside a warning) and diagnostics whose ranges arrive out of order.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the higher-severity (error) styling wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Diagnostics are returned sorted by position regardless of publish order.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1815
// Zero-width diagnostic ranges cannot be highlighted as-is; verify how they
// are widened when producing highlighted chunks (see the comment before the
// final assertion for the exact widening rules).
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two diagnostics with empty ranges directly, bypassing any
    // language server: one mid-line (line 0) and one at end-of-line (line 1).
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1884
1885#[gpui::test]
1886async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1887 init_test(cx);
1888
1889 let fs = FakeFs::new(cx.executor());
1890 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1891 .await;
1892
1893 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1894
1895 project.update(cx, |project, cx| {
1896 project
1897 .update_diagnostic_entries(
1898 LanguageServerId(0),
1899 Path::new("/dir/a.rs").to_owned(),
1900 None,
1901 vec![DiagnosticEntry {
1902 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1903 diagnostic: Diagnostic {
1904 severity: DiagnosticSeverity::ERROR,
1905 is_primary: true,
1906 message: "syntax error a1".to_string(),
1907 ..Default::default()
1908 },
1909 }],
1910 cx,
1911 )
1912 .unwrap();
1913 project
1914 .update_diagnostic_entries(
1915 LanguageServerId(1),
1916 Path::new("/dir/a.rs").to_owned(),
1917 None,
1918 vec![DiagnosticEntry {
1919 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1920 diagnostic: Diagnostic {
1921 severity: DiagnosticSeverity::ERROR,
1922 is_primary: true,
1923 message: "syntax error b1".to_string(),
1924 ..Default::default()
1925 },
1926 }],
1927 cx,
1928 )
1929 .unwrap();
1930
1931 assert_eq!(
1932 project.diagnostic_summary(false, cx),
1933 DiagnosticSummary {
1934 error_count: 2,
1935 warning_count: 0,
1936 }
1937 );
1938 });
1939}
1940
// LSP edits computed against an older document version (pinned via the
// `version` argument to `edits_from_lsp`) must be translated through the
// buffer edits made since that version, so that applying them produces the
// text the server intended.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server will compute its edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The server's edits use coordinates from the original (pre-edit) text;
    // passing `lsp_document_version` lets the project remap them.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits yields the intended result even though the
    // buffer moved on after the server computed them.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2093
// A language server may express a small change as a very large diff (e.g.
// rust-analyzer's merge-imports action rewrites most of the file). The
// project should minimize such edits down to the actual difference before
// applying them to the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The sprawling diff collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2201
// Malformed LSP edits — inverted ranges, positions past the end of the file,
// and out-of-order entries — must be sanitized by `edits_from_lsp` into a
// valid, minimal, in-order edit list rather than panicking or corrupting the
// buffer.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After sanitization, only two well-formed, ordered edits remain.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2305
2306fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2307 buffer: &Buffer,
2308 range: Range<T>,
2309) -> Vec<(String, Option<DiagnosticSeverity>)> {
2310 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2311 for chunk in buffer.snapshot().chunks(range, true) {
2312 if chunks.last().map_or(false, |prev_chunk| {
2313 prev_chunk.1 == chunk.diagnostic_severity
2314 }) {
2315 chunks.last_mut().unwrap().0.push_str(chunk.text);
2316 } else {
2317 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2318 }
2319 }
2320 chunks
2321}
2322
// Verifies go-to-definition across files: the definition target in a file
// outside the project's visible worktrees is opened in a new *invisible*
// worktree, which is dropped again once the definition is released.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server answers the definition request with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened in an invisible worktree (false); b.rs stays visible.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the target buffer, so the invisible
    // worktree for a.rs is removed.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2417
// Verifies that when a completion item has no explicit text edit range, the
// replaced range is inferred from the word characters around the cursor:
// once for an identifier suffix (`fqn`) and once inside a string literal.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after the identifier fragment "fqn". The label
    // differs from insert_text; insert_text must win as the new text.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, just before the closing
    // quote. No insert_text, so the label itself is the new text.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers the word "cmp" before the cursor ("/" is not
    // a word character, so the range stops there).
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2509
// Verifies that carriage returns in a completion's insert_text are normalized
// to the buffer's line endings: both "\r" and "\r\n" become "\n".
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server's insert_text mixes a lone "\r" and a Windows "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending forms were normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2570
// Verifies the full command-based code-action flow: the server returns an
// action with `data` but no edits; resolving it yields a command; executing
// the command makes the server send a `workspace/applyEdit` request back to
// the editor; and the resulting edits are captured in the project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server advertises codeAction/resolve support so the client resolves
    // actions before applying them.
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2703
// Verifies that saving a buffer writes its full in-memory contents to disk.
// The edit is deliberately large (~160 KB) to exercise a multi-chunk write.
#[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "the old contents");
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // The on-disk contents must match the buffer exactly after saving.
    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
2735
2736#[gpui::test(iterations = 30)]
2737async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2738 init_test(cx);
2739
2740 let fs = FakeFs::new(cx.executor().clone());
2741 fs.insert_tree(
2742 "/dir",
2743 json!({
2744 "file1": "the original contents",
2745 }),
2746 )
2747 .await;
2748
2749 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2750 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2751 let buffer = project
2752 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2753 .await
2754 .unwrap();
2755
2756 // Simulate buffer diffs being slow, so that they don't complete before
2757 // the next file change occurs.
2758 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2759
2760 // Change the buffer's file on disk, and then wait for the file change
2761 // to be detected by the worktree, so that the buffer starts reloading.
2762 fs.save(
2763 "/dir/file1".as_ref(),
2764 &"the first contents".into(),
2765 Default::default(),
2766 )
2767 .await
2768 .unwrap();
2769 worktree.next_event(cx).await;
2770
2771 // Change the buffer's file again. Depending on the random seed, the
2772 // previous file change may still be in progress.
2773 fs.save(
2774 "/dir/file1".as_ref(),
2775 &"the second contents".into(),
2776 Default::default(),
2777 )
2778 .await
2779 .unwrap();
2780 worktree.next_event(cx).await;
2781
2782 cx.executor().run_until_parked();
2783 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2784 buffer.read_with(cx, |buffer, _| {
2785 assert_eq!(buffer.text(), on_disk_text);
2786 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2787 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2788 });
2789}
2790
2791#[gpui::test(iterations = 30)]
2792async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2793 init_test(cx);
2794
2795 let fs = FakeFs::new(cx.executor().clone());
2796 fs.insert_tree(
2797 "/dir",
2798 json!({
2799 "file1": "the original contents",
2800 }),
2801 )
2802 .await;
2803
2804 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2805 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2806 let buffer = project
2807 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2808 .await
2809 .unwrap();
2810
2811 // Simulate buffer diffs being slow, so that they don't complete before
2812 // the next file change occurs.
2813 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2814
2815 // Change the buffer's file on disk, and then wait for the file change
2816 // to be detected by the worktree, so that the buffer starts reloading.
2817 fs.save(
2818 "/dir/file1".as_ref(),
2819 &"the first contents".into(),
2820 Default::default(),
2821 )
2822 .await
2823 .unwrap();
2824 worktree.next_event(cx).await;
2825
2826 cx.executor()
2827 .spawn(cx.executor().simulate_random_delay())
2828 .await;
2829
2830 // Perform a noop edit, causing the buffer's version to increase.
2831 buffer.update(cx, |buffer, cx| {
2832 buffer.edit([(0..0, " ")], None, cx);
2833 buffer.undo(cx);
2834 });
2835
2836 cx.executor().run_until_parked();
2837 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2838 buffer.read_with(cx, |buffer, _| {
2839 let buffer_text = buffer.text();
2840 if buffer_text == on_disk_text {
2841 assert!(
2842 !buffer.is_dirty() && !buffer.has_conflict(),
2843 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2844 );
2845 }
2846 // If the file change occurred while the buffer was processing the first
2847 // change, the buffer will be in a conflicting state.
2848 else {
2849 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2850 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2851 }
2852 });
2853}
2854
// Verifies saving works when the worktree root is a single file rather than
// a directory (the project was opened directly on "/dir/file1").
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    // Note: the worktree root is the file itself, not its parent directory.
    let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // The on-disk contents must match the buffer exactly after saving.
    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
2885
// Verifies "save as" on an unsaved buffer: the buffer becomes clean, picks up
// a language from its new file extension, and is deduplicated with subsequent
// opens of the same path.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // A brand-new buffer with no file starts as dirty Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees().next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The .rs extension triggered language detection.
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the just-saved path must return the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
2937
// Uses the real filesystem to verify that after on-disk renames and deletions:
// (1) entry ids are stable across renames, (2) open buffers track their files'
// new paths (or are marked deleted), and (3) a remote replica of the worktree
// converges to the same state when fed the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS operations block, so parking must be allowed.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record the update stream the local worktree emits, so it can be
    // replayed into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            Box::new(CollabRemoteWorktreeClient(project.read(cx).client())),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames, including the parent-directory rename.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // file5 was removed on disk; its buffer keeps the old path but is
        // flagged as deleted.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3090
// Verifies that renaming a directory through the project keeps entry ids
// stable for the directory and the files inside it, and that an open buffer
// within the renamed directory remains clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are preserved across the rename; the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3142
// Verifies that opening the same path multiple times — concurrently or after
// dropping one handle — always yields the same buffer entity.
#[gpui::test]
async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.txt": "a-contents",
            "b.txt": "b-contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Spawn multiple tasks to open paths, repeating some paths.
    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
        (
            p.open_local_buffer("/dir/a.txt", cx),
            p.open_local_buffer("/dir/b.txt", cx),
            p.open_local_buffer("/dir/a.txt", cx),
        )
    });

    let buffer_a_1 = buffer_a_1.await.unwrap();
    let buffer_a_2 = buffer_a_2.await.unwrap();
    let buffer_b = buffer_b.await.unwrap();
    assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
    assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");

    // There is only one buffer per path.
    let buffer_a_id = buffer_a_1.entity_id();
    assert_eq!(buffer_a_2.entity_id(), buffer_a_id);

    // Open the same path again while it is still open.
    drop(buffer_a_1);
    let buffer_a_3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
        .await
        .unwrap();

    // There's still only one buffer per path.
    assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
}
3188
// Verifies the buffer's dirty/saved lifecycle and the exact event sequences
// it emits: dirtied on first edit, cleaned on save, cleaned again when edits
// restore the saved text, and dirtied (with or without a DirtyChanged event)
// when the underlying file is deleted.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Shared log of all non-Operation events emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events fire on every edit; filter them out so the
                // assertions below can compare exact event sequences.
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first of the two edits flips the dirty bit, so only one
        // DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3329
// Verifies reload-vs-conflict behavior when the file changes on disk:
// a clean buffer reloads via a diff (preserving anchor positions); a buffer
// with unsaved edits does not reload and is marked as conflicting instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Anchors at column 1 of the first three rows; used below to check that
    // reloading via diff keeps anchors attached to their original lines.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors followed their lines: "aaa" moved to row 1, "bbbbb" to
        // row 3, and the anchor on the deleted "c" line clamped to row 3.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3410
3411#[gpui::test]
3412async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3413 init_test(cx);
3414
3415 let fs = FakeFs::new(cx.executor());
3416 fs.insert_tree(
3417 "/dir",
3418 json!({
3419 "file1": "a\nb\nc\n",
3420 "file2": "one\r\ntwo\r\nthree\r\n",
3421 }),
3422 )
3423 .await;
3424
3425 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3426 let buffer1 = project
3427 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3428 .await
3429 .unwrap();
3430 let buffer2 = project
3431 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3432 .await
3433 .unwrap();
3434
3435 buffer1.update(cx, |buffer, _| {
3436 assert_eq!(buffer.text(), "a\nb\nc\n");
3437 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3438 });
3439 buffer2.update(cx, |buffer, _| {
3440 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3441 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3442 });
3443
3444 // Change a file's line endings on disk from unix to windows. The buffer's
3445 // state updates correctly.
3446 fs.save(
3447 "/dir/file1".as_ref(),
3448 &"aaa\nb\nc\n".into(),
3449 LineEnding::Windows,
3450 )
3451 .await
3452 .unwrap();
3453 cx.executor().run_until_parked();
3454 buffer1.update(cx, |buffer, _| {
3455 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3456 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3457 });
3458
3459 // Save a file with windows line endings. The file is written correctly.
3460 buffer2.update(cx, |buffer, cx| {
3461 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3462 });
3463 project
3464 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3465 .await
3466 .unwrap();
3467 assert_eq!(
3468 fs.load("/dir/file2".as_ref()).await.unwrap(),
3469 "one\r\ntwo\r\nthree\r\nfour\r\n",
3470 );
3471}
3472
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics connected through `related_information`
    // are collapsed into groups: each group gets one primary entry plus its
    // supporting hints, all sharing a `group_id`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate a `textDocument/publishDiagnostics` payload containing two
    // logical problems: "error 1" (a warning with one related hint) and
    // "error 2" (an error with two related hints). The standalone HINT
    // diagnostics point back at their primaries via related_information.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the payload and snapshot the buffer's resulting diagnostic set.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order: the "error 2" group gets group_id 0,
    // the "error 1" group gets group_id 1, and only the WARNING/ERROR entries
    // are primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 contains both "error 2" hints followed by the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 contains the "error 1" primary warning and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3714
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the two-phase LSP rename flow against a fake server:
    // `prepare_rename` resolves the renameable range, then `perform_rename`
    // applies a multi-file WorkspaceEdit and returns the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // (including prepare-rename).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer for the language starts the fake server.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE"). The fake server
    // answers with the identifier's range; the request handler must be
    // installed before the response future is awaited.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform the rename to "THREE". The fake server returns a
    // WorkspaceEdit that touches both one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The rename's transaction maps each edited buffer to its transaction;
    // both files should reflect the new identifier.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3847
3848#[gpui::test]
3849async fn test_search(cx: &mut gpui::TestAppContext) {
3850 init_test(cx);
3851
3852 let fs = FakeFs::new(cx.executor());
3853 fs.insert_tree(
3854 "/dir",
3855 json!({
3856 "one.rs": "const ONE: usize = 1;",
3857 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3858 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3859 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3860 }),
3861 )
3862 .await;
3863 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3864 assert_eq!(
3865 search(
3866 &project,
3867 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3868 cx
3869 )
3870 .await
3871 .unwrap(),
3872 HashMap::from_iter([
3873 ("dir/two.rs".to_string(), vec![6..9]),
3874 ("dir/three.rs".to_string(), vec![37..40])
3875 ])
3876 );
3877
3878 let buffer_4 = project
3879 .update(cx, |project, cx| {
3880 project.open_local_buffer("/dir/four.rs", cx)
3881 })
3882 .await
3883 .unwrap();
3884 buffer_4.update(cx, |buffer, cx| {
3885 let text = "two::TWO";
3886 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3887 });
3888
3889 assert_eq!(
3890 search(
3891 &project,
3892 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3893 cx
3894 )
3895 .await
3896 .unwrap(),
3897 HashMap::from_iter([
3898 ("dir/two.rs".to_string(), vec![6..9]),
3899 ("dir/three.rs".to_string(), vec![37..40]),
3900 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3901 ])
3902 );
3903}
3904
3905#[gpui::test]
3906async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3907 init_test(cx);
3908
3909 let search_query = "file";
3910
3911 let fs = FakeFs::new(cx.executor());
3912 fs.insert_tree(
3913 "/dir",
3914 json!({
3915 "one.rs": r#"// Rust file one"#,
3916 "one.ts": r#"// TypeScript file one"#,
3917 "two.rs": r#"// Rust file two"#,
3918 "two.ts": r#"// TypeScript file two"#,
3919 }),
3920 )
3921 .await;
3922 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3923
3924 assert!(
3925 search(
3926 &project,
3927 SearchQuery::text(
3928 search_query,
3929 false,
3930 true,
3931 false,
3932 vec![PathMatcher::new("*.odd").unwrap()],
3933 Vec::new()
3934 )
3935 .unwrap(),
3936 cx
3937 )
3938 .await
3939 .unwrap()
3940 .is_empty(),
3941 "If no inclusions match, no files should be returned"
3942 );
3943
3944 assert_eq!(
3945 search(
3946 &project,
3947 SearchQuery::text(
3948 search_query,
3949 false,
3950 true,
3951 false,
3952 vec![PathMatcher::new("*.rs").unwrap()],
3953 Vec::new()
3954 )
3955 .unwrap(),
3956 cx
3957 )
3958 .await
3959 .unwrap(),
3960 HashMap::from_iter([
3961 ("dir/one.rs".to_string(), vec![8..12]),
3962 ("dir/two.rs".to_string(), vec![8..12]),
3963 ]),
3964 "Rust only search should give only Rust files"
3965 );
3966
3967 assert_eq!(
3968 search(
3969 &project,
3970 SearchQuery::text(
3971 search_query,
3972 false,
3973 true,
3974 false,
3975 vec![
3976 PathMatcher::new("*.ts").unwrap(),
3977 PathMatcher::new("*.odd").unwrap(),
3978 ],
3979 Vec::new()
3980 ).unwrap(),
3981 cx
3982 )
3983 .await
3984 .unwrap(),
3985 HashMap::from_iter([
3986 ("dir/one.ts".to_string(), vec![14..18]),
3987 ("dir/two.ts".to_string(), vec![14..18]),
3988 ]),
3989 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3990 );
3991
3992 assert_eq!(
3993 search(
3994 &project,
3995 SearchQuery::text(
3996 search_query,
3997 false,
3998 true,
3999 false,
4000 vec![
4001 PathMatcher::new("*.rs").unwrap(),
4002 PathMatcher::new("*.ts").unwrap(),
4003 PathMatcher::new("*.odd").unwrap(),
4004 ],
4005 Vec::new()
4006 ).unwrap(),
4007 cx
4008 )
4009 .await
4010 .unwrap(),
4011 HashMap::from_iter([
4012 ("dir/two.ts".to_string(), vec![14..18]),
4013 ("dir/one.rs".to_string(), vec![8..12]),
4014 ("dir/one.ts".to_string(), vec![14..18]),
4015 ("dir/two.rs".to_string(), vec![8..12]),
4016 ]),
4017 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4018 );
4019}
4020
4021#[gpui::test]
4022async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4023 init_test(cx);
4024
4025 let search_query = "file";
4026
4027 let fs = FakeFs::new(cx.executor());
4028 fs.insert_tree(
4029 "/dir",
4030 json!({
4031 "one.rs": r#"// Rust file one"#,
4032 "one.ts": r#"// TypeScript file one"#,
4033 "two.rs": r#"// Rust file two"#,
4034 "two.ts": r#"// TypeScript file two"#,
4035 }),
4036 )
4037 .await;
4038 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4039
4040 assert_eq!(
4041 search(
4042 &project,
4043 SearchQuery::text(
4044 search_query,
4045 false,
4046 true,
4047 false,
4048 Vec::new(),
4049 vec![PathMatcher::new("*.odd").unwrap()],
4050 )
4051 .unwrap(),
4052 cx
4053 )
4054 .await
4055 .unwrap(),
4056 HashMap::from_iter([
4057 ("dir/one.rs".to_string(), vec![8..12]),
4058 ("dir/one.ts".to_string(), vec![14..18]),
4059 ("dir/two.rs".to_string(), vec![8..12]),
4060 ("dir/two.ts".to_string(), vec![14..18]),
4061 ]),
4062 "If no exclusions match, all files should be returned"
4063 );
4064
4065 assert_eq!(
4066 search(
4067 &project,
4068 SearchQuery::text(
4069 search_query,
4070 false,
4071 true,
4072 false,
4073 Vec::new(),
4074 vec![PathMatcher::new("*.rs").unwrap()],
4075 )
4076 .unwrap(),
4077 cx
4078 )
4079 .await
4080 .unwrap(),
4081 HashMap::from_iter([
4082 ("dir/one.ts".to_string(), vec![14..18]),
4083 ("dir/two.ts".to_string(), vec![14..18]),
4084 ]),
4085 "Rust exclusion search should give only TypeScript files"
4086 );
4087
4088 assert_eq!(
4089 search(
4090 &project,
4091 SearchQuery::text(
4092 search_query,
4093 false,
4094 true,
4095 false,
4096 Vec::new(),
4097 vec![
4098 PathMatcher::new("*.ts").unwrap(),
4099 PathMatcher::new("*.odd").unwrap(),
4100 ],
4101 ).unwrap(),
4102 cx
4103 )
4104 .await
4105 .unwrap(),
4106 HashMap::from_iter([
4107 ("dir/one.rs".to_string(), vec![8..12]),
4108 ("dir/two.rs".to_string(), vec![8..12]),
4109 ]),
4110 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4111 );
4112
4113 assert!(
4114 search(
4115 &project,
4116 SearchQuery::text(
4117 search_query,
4118 false,
4119 true,
4120 false,
4121 Vec::new(),
4122 vec![
4123 PathMatcher::new("*.rs").unwrap(),
4124 PathMatcher::new("*.ts").unwrap(),
4125 PathMatcher::new("*.odd").unwrap(),
4126 ],
4127 ).unwrap(),
4128 cx
4129 )
4130 .await
4131 .unwrap().is_empty(),
4132 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4133 );
4134}
4135
4136#[gpui::test]
4137async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4138 init_test(cx);
4139
4140 let search_query = "file";
4141
4142 let fs = FakeFs::new(cx.executor());
4143 fs.insert_tree(
4144 "/dir",
4145 json!({
4146 "one.rs": r#"// Rust file one"#,
4147 "one.ts": r#"// TypeScript file one"#,
4148 "two.rs": r#"// Rust file two"#,
4149 "two.ts": r#"// TypeScript file two"#,
4150 }),
4151 )
4152 .await;
4153 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4154
4155 assert!(
4156 search(
4157 &project,
4158 SearchQuery::text(
4159 search_query,
4160 false,
4161 true,
4162 false,
4163 vec![PathMatcher::new("*.odd").unwrap()],
4164 vec![PathMatcher::new("*.odd").unwrap()],
4165 )
4166 .unwrap(),
4167 cx
4168 )
4169 .await
4170 .unwrap()
4171 .is_empty(),
4172 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4173 );
4174
4175 assert!(
4176 search(
4177 &project,
4178 SearchQuery::text(
4179 search_query,
4180 false,
4181 true,
4182 false,
4183 vec![PathMatcher::new("*.ts").unwrap()],
4184 vec![PathMatcher::new("*.ts").unwrap()],
4185 ).unwrap(),
4186 cx
4187 )
4188 .await
4189 .unwrap()
4190 .is_empty(),
4191 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4192 );
4193
4194 assert!(
4195 search(
4196 &project,
4197 SearchQuery::text(
4198 search_query,
4199 false,
4200 true,
4201 false,
4202 vec![
4203 PathMatcher::new("*.ts").unwrap(),
4204 PathMatcher::new("*.odd").unwrap()
4205 ],
4206 vec![
4207 PathMatcher::new("*.ts").unwrap(),
4208 PathMatcher::new("*.odd").unwrap()
4209 ],
4210 )
4211 .unwrap(),
4212 cx
4213 )
4214 .await
4215 .unwrap()
4216 .is_empty(),
4217 "Non-matching inclusions and exclusions should not change that."
4218 );
4219
4220 assert_eq!(
4221 search(
4222 &project,
4223 SearchQuery::text(
4224 search_query,
4225 false,
4226 true,
4227 false,
4228 vec![
4229 PathMatcher::new("*.ts").unwrap(),
4230 PathMatcher::new("*.odd").unwrap()
4231 ],
4232 vec![
4233 PathMatcher::new("*.rs").unwrap(),
4234 PathMatcher::new("*.odd").unwrap()
4235 ],
4236 )
4237 .unwrap(),
4238 cx
4239 )
4240 .await
4241 .unwrap(),
4242 HashMap::from_iter([
4243 ("dir/one.ts".to_string(), vec![14..18]),
4244 ("dir/two.ts".to_string(), vec![14..18]),
4245 ]),
4246 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4247 );
4248}
4249
4250#[gpui::test]
4251async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4252 init_test(cx);
4253
4254 let fs = FakeFs::new(cx.executor());
4255 fs.insert_tree(
4256 "/worktree-a",
4257 json!({
4258 "haystack.rs": r#"// NEEDLE"#,
4259 "haystack.ts": r#"// NEEDLE"#,
4260 }),
4261 )
4262 .await;
4263 fs.insert_tree(
4264 "/worktree-b",
4265 json!({
4266 "haystack.rs": r#"// NEEDLE"#,
4267 "haystack.ts": r#"// NEEDLE"#,
4268 }),
4269 )
4270 .await;
4271
4272 let project = Project::test(
4273 fs.clone(),
4274 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4275 cx,
4276 )
4277 .await;
4278
4279 assert_eq!(
4280 search(
4281 &project,
4282 SearchQuery::text(
4283 "NEEDLE",
4284 false,
4285 true,
4286 false,
4287 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4288 Vec::new()
4289 )
4290 .unwrap(),
4291 cx
4292 )
4293 .await
4294 .unwrap(),
4295 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4296 "should only return results from included worktree"
4297 );
4298 assert_eq!(
4299 search(
4300 &project,
4301 SearchQuery::text(
4302 "NEEDLE",
4303 false,
4304 true,
4305 false,
4306 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4307 Vec::new()
4308 )
4309 .unwrap(),
4310 cx
4311 )
4312 .await
4313 .unwrap(),
4314 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4315 "should only return results from included worktree"
4316 );
4317
4318 assert_eq!(
4319 search(
4320 &project,
4321 SearchQuery::text(
4322 "NEEDLE",
4323 false,
4324 true,
4325 false,
4326 vec![PathMatcher::new("*.ts").unwrap()],
4327 Vec::new()
4328 )
4329 .unwrap(),
4330 cx
4331 )
4332 .await
4333 .unwrap(),
4334 HashMap::from_iter([
4335 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4336 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4337 ]),
4338 "should return results from both worktrees"
4339 );
4340}
4341
4342#[gpui::test]
4343async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4344 init_test(cx);
4345
4346 let fs = FakeFs::new(cx.background_executor.clone());
4347 fs.insert_tree(
4348 "/dir",
4349 json!({
4350 ".git": {},
4351 ".gitignore": "**/target\n/node_modules\n",
4352 "target": {
4353 "index.txt": "index_key:index_value"
4354 },
4355 "node_modules": {
4356 "eslint": {
4357 "index.ts": "const eslint_key = 'eslint value'",
4358 "package.json": r#"{ "some_key": "some value" }"#,
4359 },
4360 "prettier": {
4361 "index.ts": "const prettier_key = 'prettier value'",
4362 "package.json": r#"{ "other_key": "other value" }"#,
4363 },
4364 },
4365 "package.json": r#"{ "main_key": "main value" }"#,
4366 }),
4367 )
4368 .await;
4369 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4370
4371 let query = "key";
4372 assert_eq!(
4373 search(
4374 &project,
4375 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4376 cx
4377 )
4378 .await
4379 .unwrap(),
4380 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4381 "Only one non-ignored file should have the query"
4382 );
4383
4384 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4385 assert_eq!(
4386 search(
4387 &project,
4388 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4389 cx
4390 )
4391 .await
4392 .unwrap(),
4393 HashMap::from_iter([
4394 ("dir/package.json".to_string(), vec![8..11]),
4395 ("dir/target/index.txt".to_string(), vec![6..9]),
4396 (
4397 "dir/node_modules/prettier/package.json".to_string(),
4398 vec![9..12]
4399 ),
4400 (
4401 "dir/node_modules/prettier/index.ts".to_string(),
4402 vec![15..18]
4403 ),
4404 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4405 (
4406 "dir/node_modules/eslint/package.json".to_string(),
4407 vec![8..11]
4408 ),
4409 ]),
4410 "Unrestricted search with ignored directories should find every file with the query"
4411 );
4412
4413 let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
4414 let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
4415 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4416 assert_eq!(
4417 search(
4418 &project,
4419 SearchQuery::text(
4420 query,
4421 false,
4422 false,
4423 true,
4424 files_to_include,
4425 files_to_exclude,
4426 )
4427 .unwrap(),
4428 cx
4429 )
4430 .await
4431 .unwrap(),
4432 HashMap::from_iter([(
4433 "dir/node_modules/prettier/package.json".to_string(),
4434 vec![9..12]
4435 )]),
4436 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4437 );
4438}
4439
#[test]
fn test_glob_literal_prefix() {
    // Each pair is (glob pattern, expected literal path prefix): the prefix
    // stops at the first wildcard or brace component.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected_prefix) in cases {
        assert_eq!(glob_literal_prefix(glob), expected_prefix);
    }
}
4447
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies path validation in `create_entry` / `open_buffer`: names that
    // merely contain dots (like "b..") are allowed, but any path containing a
    // `..` component — whether it escapes the worktree or not — is rejected.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is /one/two/three; /one/two/c.rs lies outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name (dots, but no ".." path component).
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the "b.." entry was created; the rejected paths left no trace.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4517
4518#[gpui::test]
4519async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4520 init_test(cx);
4521
4522 let fs = FakeFs::new(cx.executor());
4523 fs.insert_tree(
4524 "/dir",
4525 json!({
4526 "a.tsx": "a",
4527 }),
4528 )
4529 .await;
4530
4531 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4532
4533 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4534 language_registry.add(tsx_lang());
4535 let language_server_names = [
4536 "TypeScriptServer",
4537 "TailwindServer",
4538 "ESLintServer",
4539 "NoHoverCapabilitiesServer",
4540 ];
4541 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4542 "tsx",
4543 true,
4544 FakeLspAdapter {
4545 name: &language_server_names[0],
4546 capabilities: lsp::ServerCapabilities {
4547 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4548 ..lsp::ServerCapabilities::default()
4549 },
4550 ..FakeLspAdapter::default()
4551 },
4552 );
4553 let _a = language_registry.register_specific_fake_lsp_adapter(
4554 "tsx",
4555 false,
4556 FakeLspAdapter {
4557 name: &language_server_names[1],
4558 capabilities: lsp::ServerCapabilities {
4559 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4560 ..lsp::ServerCapabilities::default()
4561 },
4562 ..FakeLspAdapter::default()
4563 },
4564 );
4565 let _b = language_registry.register_specific_fake_lsp_adapter(
4566 "tsx",
4567 false,
4568 FakeLspAdapter {
4569 name: &language_server_names[2],
4570 capabilities: lsp::ServerCapabilities {
4571 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4572 ..lsp::ServerCapabilities::default()
4573 },
4574 ..FakeLspAdapter::default()
4575 },
4576 );
4577 let _c = language_registry.register_specific_fake_lsp_adapter(
4578 "tsx",
4579 false,
4580 FakeLspAdapter {
4581 name: &language_server_names[3],
4582 capabilities: lsp::ServerCapabilities {
4583 hover_provider: None,
4584 ..lsp::ServerCapabilities::default()
4585 },
4586 ..FakeLspAdapter::default()
4587 },
4588 );
4589
4590 let buffer = project
4591 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4592 .await
4593 .unwrap();
4594 cx.executor().run_until_parked();
4595
4596 let mut servers_with_hover_requests = HashMap::default();
4597 for i in 0..language_server_names.len() {
4598 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4599 panic!(
4600 "Failed to get language server #{i} with name {}",
4601 &language_server_names[i]
4602 )
4603 });
4604 let new_server_name = new_server.server.name();
4605 assert!(
4606 !servers_with_hover_requests.contains_key(new_server_name),
4607 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4608 );
4609 let new_server_name = new_server_name.to_string();
4610 match new_server_name.as_str() {
4611 "TailwindServer" | "TypeScriptServer" => {
4612 servers_with_hover_requests.insert(
4613 new_server_name.clone(),
4614 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4615 let name = new_server_name.clone();
4616 async move {
4617 Ok(Some(lsp::Hover {
4618 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4619 format!("{name} hover"),
4620 )),
4621 range: None,
4622 }))
4623 }
4624 }),
4625 );
4626 }
4627 "ESLintServer" => {
4628 servers_with_hover_requests.insert(
4629 new_server_name,
4630 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4631 |_, _| async move { Ok(None) },
4632 ),
4633 );
4634 }
4635 "NoHoverCapabilitiesServer" => {
4636 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4637 |_, _| async move {
4638 panic!(
4639 "Should not call for hovers server with no corresponding capabilities"
4640 )
4641 },
4642 );
4643 }
4644 unexpected => panic!("Unexpected server name: {unexpected}"),
4645 }
4646 }
4647
4648 let hover_task = project.update(cx, |project, cx| {
4649 project.hover(&buffer, Point::new(0, 0), cx)
4650 });
4651 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4652 |mut hover_request| async move {
4653 hover_request
4654 .next()
4655 .await
4656 .expect("All hover requests should have been triggered")
4657 },
4658 ))
4659 .await;
4660 assert_eq!(
4661 vec!["TailwindServer hover", "TypeScriptServer hover"],
4662 hover_task
4663 .await
4664 .into_iter()
4665 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4666 .sorted()
4667 .collect::<Vec<_>>(),
4668 "Should receive hover responses from all related servers with hover capabilities"
4669 );
4670}
4671
4672#[gpui::test]
4673async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4674 init_test(cx);
4675
4676 let fs = FakeFs::new(cx.executor());
4677 fs.insert_tree(
4678 "/dir",
4679 json!({
4680 "a.ts": "a",
4681 }),
4682 )
4683 .await;
4684
4685 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4686
4687 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4688 language_registry.add(typescript_lang());
4689 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4690 "TypeScript",
4691 FakeLspAdapter {
4692 capabilities: lsp::ServerCapabilities {
4693 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4694 ..lsp::ServerCapabilities::default()
4695 },
4696 ..FakeLspAdapter::default()
4697 },
4698 );
4699
4700 let buffer = project
4701 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4702 .await
4703 .unwrap();
4704 cx.executor().run_until_parked();
4705
4706 let fake_server = fake_language_servers
4707 .next()
4708 .await
4709 .expect("failed to get the language server");
4710
4711 let mut request_handled =
4712 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4713 Ok(Some(lsp::Hover {
4714 contents: lsp::HoverContents::Array(vec![
4715 lsp::MarkedString::String("".to_string()),
4716 lsp::MarkedString::String(" ".to_string()),
4717 lsp::MarkedString::String("\n\n\n".to_string()),
4718 ]),
4719 range: None,
4720 }))
4721 });
4722
4723 let hover_task = project.update(cx, |project, cx| {
4724 project.hover(&buffer, Point::new(0, 0), cx)
4725 });
4726 let () = request_handled
4727 .next()
4728 .await
4729 .expect("All hover requests should have been triggered");
4730 assert_eq!(
4731 Vec::<String>::new(),
4732 hover_task
4733 .await
4734 .into_iter()
4735 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4736 .sorted()
4737 .collect::<Vec<_>>(),
4738 "Empty hover parts should be ignored"
4739 );
4740}
4741
4742#[gpui::test]
4743async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4744 init_test(cx);
4745
4746 let fs = FakeFs::new(cx.executor());
4747 fs.insert_tree(
4748 "/dir",
4749 json!({
4750 "a.tsx": "a",
4751 }),
4752 )
4753 .await;
4754
4755 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4756
4757 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4758 language_registry.add(tsx_lang());
4759 let language_server_names = [
4760 "TypeScriptServer",
4761 "TailwindServer",
4762 "ESLintServer",
4763 "NoActionsCapabilitiesServer",
4764 ];
4765 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4766 "tsx",
4767 true,
4768 FakeLspAdapter {
4769 name: &language_server_names[0],
4770 capabilities: lsp::ServerCapabilities {
4771 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4772 ..lsp::ServerCapabilities::default()
4773 },
4774 ..FakeLspAdapter::default()
4775 },
4776 );
4777 let _a = language_registry.register_specific_fake_lsp_adapter(
4778 "tsx",
4779 false,
4780 FakeLspAdapter {
4781 name: &language_server_names[1],
4782 capabilities: lsp::ServerCapabilities {
4783 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4784 ..lsp::ServerCapabilities::default()
4785 },
4786 ..FakeLspAdapter::default()
4787 },
4788 );
4789 let _b = language_registry.register_specific_fake_lsp_adapter(
4790 "tsx",
4791 false,
4792 FakeLspAdapter {
4793 name: &language_server_names[2],
4794 capabilities: lsp::ServerCapabilities {
4795 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4796 ..lsp::ServerCapabilities::default()
4797 },
4798 ..FakeLspAdapter::default()
4799 },
4800 );
4801 let _c = language_registry.register_specific_fake_lsp_adapter(
4802 "tsx",
4803 false,
4804 FakeLspAdapter {
4805 name: &language_server_names[3],
4806 capabilities: lsp::ServerCapabilities {
4807 code_action_provider: None,
4808 ..lsp::ServerCapabilities::default()
4809 },
4810 ..FakeLspAdapter::default()
4811 },
4812 );
4813
4814 let buffer = project
4815 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4816 .await
4817 .unwrap();
4818 cx.executor().run_until_parked();
4819
4820 let mut servers_with_actions_requests = HashMap::default();
4821 for i in 0..language_server_names.len() {
4822 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4823 panic!(
4824 "Failed to get language server #{i} with name {}",
4825 &language_server_names[i]
4826 )
4827 });
4828 let new_server_name = new_server.server.name();
4829 assert!(
4830 !servers_with_actions_requests.contains_key(new_server_name),
4831 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4832 );
4833 let new_server_name = new_server_name.to_string();
4834 match new_server_name.as_str() {
4835 "TailwindServer" | "TypeScriptServer" => {
4836 servers_with_actions_requests.insert(
4837 new_server_name.clone(),
4838 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4839 move |_, _| {
4840 let name = new_server_name.clone();
4841 async move {
4842 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4843 lsp::CodeAction {
4844 title: format!("{name} code action"),
4845 ..lsp::CodeAction::default()
4846 },
4847 )]))
4848 }
4849 },
4850 ),
4851 );
4852 }
4853 "ESLintServer" => {
4854 servers_with_actions_requests.insert(
4855 new_server_name,
4856 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4857 |_, _| async move { Ok(None) },
4858 ),
4859 );
4860 }
4861 "NoActionsCapabilitiesServer" => {
4862 let _never_handled = new_server
4863 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4864 panic!(
4865 "Should not call for code actions server with no corresponding capabilities"
4866 )
4867 });
4868 }
4869 unexpected => panic!("Unexpected server name: {unexpected}"),
4870 }
4871 }
4872
4873 let code_actions_task = project.update(cx, |project, cx| {
4874 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4875 });
4876 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4877 |mut code_actions_request| async move {
4878 code_actions_request
4879 .next()
4880 .await
4881 .expect("All code actions requests should have been triggered")
4882 },
4883 ))
4884 .await;
4885 assert_eq!(
4886 vec!["TailwindServer code action", "TypeScriptServer code action"],
4887 code_actions_task
4888 .await
4889 .into_iter()
4890 .map(|code_action| code_action.lsp_action.title)
4891 .sorted()
4892 .collect::<Vec<_>>(),
4893 "Should receive code actions responses from all related servers with hover capabilities"
4894 );
4895}
4896
4897#[gpui::test]
4898async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
4899 init_test(cx);
4900
4901 let fs = FakeFs::new(cx.executor());
4902 fs.insert_tree(
4903 "/dir",
4904 json!({
4905 "a.rs": "let a = 1;",
4906 "b.rs": "let b = 2;",
4907 "c.rs": "let c = 2;",
4908 }),
4909 )
4910 .await;
4911
4912 let project = Project::test(
4913 fs,
4914 [
4915 "/dir/a.rs".as_ref(),
4916 "/dir/b.rs".as_ref(),
4917 "/dir/c.rs".as_ref(),
4918 ],
4919 cx,
4920 )
4921 .await;
4922
4923 // check the initial state and get the worktrees
4924 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
4925 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
4926 assert_eq!(worktrees.len(), 3);
4927
4928 let worktree_a = worktrees[0].read(cx);
4929 let worktree_b = worktrees[1].read(cx);
4930 let worktree_c = worktrees[2].read(cx);
4931
4932 // check they start in the right order
4933 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
4934 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
4935 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
4936
4937 (
4938 worktrees[0].clone(),
4939 worktrees[1].clone(),
4940 worktrees[2].clone(),
4941 )
4942 });
4943
4944 // move first worktree to after the second
4945 // [a, b, c] -> [b, a, c]
4946 project
4947 .update(cx, |project, cx| {
4948 let first = worktree_a.read(cx);
4949 let second = worktree_b.read(cx);
4950 project.move_worktree(first.id(), second.id(), cx)
4951 })
4952 .expect("moving first after second");
4953
4954 // check the state after moving
4955 project.update(cx, |project, cx| {
4956 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
4957 assert_eq!(worktrees.len(), 3);
4958
4959 let first = worktrees[0].read(cx);
4960 let second = worktrees[1].read(cx);
4961 let third = worktrees[2].read(cx);
4962
4963 // check they are now in the right order
4964 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
4965 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
4966 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
4967 });
4968
4969 // move the second worktree to before the first
4970 // [b, a, c] -> [a, b, c]
4971 project
4972 .update(cx, |project, cx| {
4973 let second = worktree_a.read(cx);
4974 let first = worktree_b.read(cx);
4975 project.move_worktree(first.id(), second.id(), cx)
4976 })
4977 .expect("moving second before first");
4978
4979 // check the state after moving
4980 project.update(cx, |project, cx| {
4981 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
4982 assert_eq!(worktrees.len(), 3);
4983
4984 let first = worktrees[0].read(cx);
4985 let second = worktrees[1].read(cx);
4986 let third = worktrees[2].read(cx);
4987
4988 // check they are now in the right order
4989 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
4990 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
4991 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
4992 });
4993
4994 // move the second worktree to after the third
4995 // [a, b, c] -> [a, c, b]
4996 project
4997 .update(cx, |project, cx| {
4998 let second = worktree_b.read(cx);
4999 let third = worktree_c.read(cx);
5000 project.move_worktree(second.id(), third.id(), cx)
5001 })
5002 .expect("moving second after third");
5003
5004 // check the state after moving
5005 project.update(cx, |project, cx| {
5006 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5007 assert_eq!(worktrees.len(), 3);
5008
5009 let first = worktrees[0].read(cx);
5010 let second = worktrees[1].read(cx);
5011 let third = worktrees[2].read(cx);
5012
5013 // check they are now in the right order
5014 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5015 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5016 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5017 });
5018
5019 // move the third worktree to before the second
5020 // [a, c, b] -> [a, b, c]
5021 project
5022 .update(cx, |project, cx| {
5023 let third = worktree_c.read(cx);
5024 let second = worktree_b.read(cx);
5025 project.move_worktree(third.id(), second.id(), cx)
5026 })
5027 .expect("moving third before second");
5028
5029 // check the state after moving
5030 project.update(cx, |project, cx| {
5031 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5032 assert_eq!(worktrees.len(), 3);
5033
5034 let first = worktrees[0].read(cx);
5035 let second = worktrees[1].read(cx);
5036 let third = worktrees[2].read(cx);
5037
5038 // check they are now in the right order
5039 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5040 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5041 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5042 });
5043
5044 // move the first worktree to after the third
5045 // [a, b, c] -> [b, c, a]
5046 project
5047 .update(cx, |project, cx| {
5048 let first = worktree_a.read(cx);
5049 let third = worktree_c.read(cx);
5050 project.move_worktree(first.id(), third.id(), cx)
5051 })
5052 .expect("moving first after third");
5053
5054 // check the state after moving
5055 project.update(cx, |project, cx| {
5056 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5057 assert_eq!(worktrees.len(), 3);
5058
5059 let first = worktrees[0].read(cx);
5060 let second = worktrees[1].read(cx);
5061 let third = worktrees[2].read(cx);
5062
5063 // check they are now in the right order
5064 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5065 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5066 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5067 });
5068
5069 // move the third worktree to before the first
5070 // [b, c, a] -> [a, b, c]
5071 project
5072 .update(cx, |project, cx| {
5073 let third = worktree_a.read(cx);
5074 let first = worktree_b.read(cx);
5075 project.move_worktree(third.id(), first.id(), cx)
5076 })
5077 .expect("moving third before first");
5078
5079 // check the state after moving
5080 project.update(cx, |project, cx| {
5081 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5082 assert_eq!(worktrees.len(), 3);
5083
5084 let first = worktrees[0].read(cx);
5085 let second = worktrees[1].read(cx);
5086 let third = worktrees[2].read(cx);
5087
5088 // check they are now in the right order
5089 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5090 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5091 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5092 });
5093}
5094
5095async fn search(
5096 project: &Model<Project>,
5097 query: SearchQuery,
5098 cx: &mut gpui::TestAppContext,
5099) -> Result<HashMap<String, Vec<Range<usize>>>> {
5100 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5101 let mut results = HashMap::default();
5102 while let Some(search_result) = search_rx.next().await {
5103 match search_result {
5104 SearchResult::Buffer { buffer, ranges } => {
5105 results.entry(buffer).or_insert(ranges);
5106 }
5107 SearchResult::LimitReached => {}
5108 }
5109 }
5110 Ok(results
5111 .into_iter()
5112 .map(|(buffer, ranges)| {
5113 buffer.update(cx, |buffer, cx| {
5114 let path = buffer
5115 .file()
5116 .unwrap()
5117 .full_path(cx)
5118 .to_string_lossy()
5119 .to_string();
5120 let ranges = ranges
5121 .into_iter()
5122 .map(|range| range.to_offset(buffer))
5123 .collect::<Vec<_>>();
5124 (path, ranges)
5125 })
5126 })
5127 .collect())
5128}
5129
5130fn init_test(cx: &mut gpui::TestAppContext) {
5131 if std::env::var("RUST_LOG").is_ok() {
5132 env_logger::try_init().ok();
5133 }
5134
5135 cx.update(|cx| {
5136 let settings_store = SettingsStore::test(cx);
5137 cx.set_global(settings_store);
5138 release_channel::init(SemanticVersion::default(), cx);
5139 language::init(cx);
5140 Project::init_settings(cx);
5141 });
5142}
5143
5144fn json_lang() -> Arc<Language> {
5145 Arc::new(Language::new(
5146 LanguageConfig {
5147 name: "JSON".into(),
5148 matcher: LanguageMatcher {
5149 path_suffixes: vec!["json".to_string()],
5150 ..Default::default()
5151 },
5152 ..Default::default()
5153 },
5154 None,
5155 ))
5156}
5157
5158fn js_lang() -> Arc<Language> {
5159 Arc::new(Language::new(
5160 LanguageConfig {
5161 name: Arc::from("JavaScript"),
5162 matcher: LanguageMatcher {
5163 path_suffixes: vec!["js".to_string()],
5164 ..Default::default()
5165 },
5166 ..Default::default()
5167 },
5168 None,
5169 ))
5170}
5171
5172fn rust_lang() -> Arc<Language> {
5173 Arc::new(Language::new(
5174 LanguageConfig {
5175 name: "Rust".into(),
5176 matcher: LanguageMatcher {
5177 path_suffixes: vec!["rs".to_string()],
5178 ..Default::default()
5179 },
5180 ..Default::default()
5181 },
5182 Some(tree_sitter_rust::language()),
5183 ))
5184}
5185
5186fn typescript_lang() -> Arc<Language> {
5187 Arc::new(Language::new(
5188 LanguageConfig {
5189 name: "TypeScript".into(),
5190 matcher: LanguageMatcher {
5191 path_suffixes: vec!["ts".to_string()],
5192 ..Default::default()
5193 },
5194 ..Default::default()
5195 },
5196 Some(tree_sitter_typescript::language_typescript()),
5197 ))
5198}
5199
5200fn tsx_lang() -> Arc<Language> {
5201 Arc::new(Language::new(
5202 LanguageConfig {
5203 name: "tsx".into(),
5204 matcher: LanguageMatcher {
5205 path_suffixes: vec!["tsx".to_string()],
5206 ..Default::default()
5207 },
5208 ..Default::default()
5209 },
5210 Some(tree_sitter_typescript::language_tsx()),
5211 ))
5212}
5213
5214fn get_all_tasks(
5215 project: &Model<Project>,
5216 worktree_id: Option<WorktreeId>,
5217 task_context: &TaskContext,
5218 cx: &mut AppContext,
5219) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5220 let resolved_tasks = project.update(cx, |project, cx| {
5221 project
5222 .task_inventory()
5223 .read(cx)
5224 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5225 });
5226
5227 cx.spawn(|_| async move {
5228 let (mut old, new) = resolved_tasks.await;
5229 old.extend(new);
5230 old
5231 })
5232}