1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::NumberOrString;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/tmp").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (temp dir + symlinks), so parking is allowed.
    cx.executor().allow_parking();

    // Materialize a real on-disk tree; empty strings are empty files.
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Create a symlink pointing at the root directory itself, plus a symlink
    // inside the tree ("finnochio") that aliases the sibling "fennel" dir.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project *through* the symlinked root path.
    let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees().next().unwrap().read(cx);
        // NOTE(review): count appears to include the file reachable through
        // the `finnochio` symlink in addition to the four plain files — confirm.
        assert_eq!(tree.file_count(), 5);
        // The aliased path and the real path must resolve to the same inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
91
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two `.zed` directories: one at the worktree root and one nested under
    // `b/`. Each contributes its own settings.json (tab size) and tasks.json.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let task_context = TaskContext::default();

    // Let the local settings/tasks files be discovered and loaded.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        })
    });
    // Source kind describing the root-level tasks.json.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            // Root settings apply to `a/`; the nested `.zed` overrides for `b/`.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks.json files contribute a "cargo check" task with their own args.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most recently scheduled.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Replace the root tasks source with a tracked in-memory source whose
    // content arrives over a channel, simulating a tasks.json edit.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The updated root task (new args + env) is listed first; the nested
    // worktree task is unchanged.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
296
// End-to-end check of language-server lifecycle management: server startup on
// buffer open, capability-driven buffer configuration, routing of change/save
// notifications per language, renames that switch a buffer between servers,
// restarts, and close notifications. Ordering of awaited notifications matters.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers `.`/`::`, save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger `:`, save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the Rust edit shows up on the Rust server; the TOML edit goes nowhere.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // Rename within the same language: close old URI, reopen at new URI.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared on language change below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the restart completes.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is unspecified, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
679
// Verifies `workspace/didChangeWatchedFiles`: registering watchers makes the
// worktree load matching ignored paths, and subsequent FS mutations are
// forwarded to the server only when they match a registered glob.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for measuring how many extra directory scans the watchers cause.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            // Watches inside the gitignored `target` dir.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate DidChangeWatchedFiles events, sorted by URI for stable asserts.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, only directory scans.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
873
// Two single-file worktrees: diagnostics published for each file must land in
// the correct buffer and render with their own severity in chunk iteration.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each path becomes its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file: an ERROR on `a` and a WARNING on `b`,
    // both covering the variable name (columns 4..5).
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer's chunks carry only its own diagnostic severity.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
965
966#[gpui::test]
967async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
968 init_test(cx);
969
970 let fs = FakeFs::new(cx.executor());
971 fs.insert_tree(
972 "/root",
973 json!({
974 "dir": {
975 ".git": {
976 "HEAD": "ref: refs/heads/main",
977 },
978 ".gitignore": "b.rs",
979 "a.rs": "let a = 1;",
980 "b.rs": "let b = 2;",
981 },
982 "other.rs": "let b = c;"
983 }),
984 )
985 .await;
986
987 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
988 let (worktree, _) = project
989 .update(cx, |project, cx| {
990 project.find_or_create_local_worktree("/root/dir", true, cx)
991 })
992 .await
993 .unwrap();
994 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
995
996 let (worktree, _) = project
997 .update(cx, |project, cx| {
998 project.find_or_create_local_worktree("/root/other.rs", false, cx)
999 })
1000 .await
1001 .unwrap();
1002 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1003
1004 let server_id = LanguageServerId(0);
1005 project.update(cx, |project, cx| {
1006 project
1007 .update_diagnostics(
1008 server_id,
1009 lsp::PublishDiagnosticsParams {
1010 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1011 version: None,
1012 diagnostics: vec![lsp::Diagnostic {
1013 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1014 severity: Some(lsp::DiagnosticSeverity::ERROR),
1015 message: "unused variable 'b'".to_string(),
1016 ..Default::default()
1017 }],
1018 },
1019 &[],
1020 cx,
1021 )
1022 .unwrap();
1023 project
1024 .update_diagnostics(
1025 server_id,
1026 lsp::PublishDiagnosticsParams {
1027 uri: Url::from_file_path("/root/other.rs").unwrap(),
1028 version: None,
1029 diagnostics: vec![lsp::Diagnostic {
1030 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1031 severity: Some(lsp::DiagnosticSeverity::ERROR),
1032 message: "unknown variable 'c'".to_string(),
1033 ..Default::default()
1034 }],
1035 },
1036 &[],
1037 cx,
1038 )
1039 .unwrap();
1040 });
1041
1042 let main_ignored_buffer = project
1043 .update(cx, |project, cx| {
1044 project.open_buffer((main_worktree_id, "b.rs"), cx)
1045 })
1046 .await
1047 .unwrap();
1048 main_ignored_buffer.update(cx, |buffer, _| {
1049 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1050 assert_eq!(
1051 chunks
1052 .iter()
1053 .map(|(s, d)| (s.as_str(), *d))
1054 .collect::<Vec<_>>(),
1055 &[
1056 ("let ", None),
1057 ("b", Some(DiagnosticSeverity::ERROR)),
1058 (" = 2;", None),
1059 ],
1060 "Gigitnored buffers should still get in-buffer diagnostics",
1061 );
1062 });
1063 let other_buffer = project
1064 .update(cx, |project, cx| {
1065 project.open_buffer((other_worktree_id, ""), cx)
1066 })
1067 .await
1068 .unwrap();
1069 other_buffer.update(cx, |buffer, _| {
1070 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1071 assert_eq!(
1072 chunks
1073 .iter()
1074 .map(|(s, d)| (s.as_str(), *d))
1075 .collect::<Vec<_>>(),
1076 &[
1077 ("let b = ", None),
1078 ("c", Some(DiagnosticSeverity::ERROR)),
1079 (";", None),
1080 ],
1081 "Buffers from hidden projects should still get in-buffer diagnostics"
1082 );
1083 });
1084
1085 project.update(cx, |project, cx| {
1086 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1087 assert_eq!(
1088 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1089 vec![(
1090 ProjectPath {
1091 worktree_id: main_worktree_id,
1092 path: Arc::from(Path::new("b.rs")),
1093 },
1094 server_id,
1095 DiagnosticSummary {
1096 error_count: 1,
1097 warning_count: 0,
1098 }
1099 )]
1100 );
1101 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1102 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1103 });
1104}
1105
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // End-to-end check of the project event stream while a language server
    // reports disk-based diagnostics via LSP work-done progress:
    // LanguageServerAdded -> DiskBasedDiagnosticsStarted ->
    // DiagnosticsUpdated -> DiskBasedDiagnosticsFinished.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            // Progress notifications carrying this token are treated as
            // disk-based diagnostic passes.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning progress with the registered token marks the start of a
    // disk-based diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the same token finishes the diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic should appear in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second empty publish changed nothing, so no further event fires.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1234
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // A language server restarted mid-way through a disk-based diagnostics
    // pass must not leave the project stuck "updating diagnostics": only the
    // new server instance's progress is tracked after the restart.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics;
    // the old server's unfinished progress is no longer tracked.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1313
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Restarting a language server must clear the diagnostics it had
    // previously published, both in the buffer and in the project summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then verify it landed in both the
    // buffer and the project-wide summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1393
1394#[gpui::test]
1395async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1396 init_test(cx);
1397
1398 let fs = FakeFs::new(cx.executor());
1399 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1400
1401 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1402 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1403
1404 language_registry.add(rust_lang());
1405 let mut fake_servers =
1406 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1407
1408 let buffer = project
1409 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1410 .await
1411 .unwrap();
1412
1413 // Before restarting the server, report diagnostics with an unknown buffer version.
1414 let fake_server = fake_servers.next().await.unwrap();
1415 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1416 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1417 version: Some(10000),
1418 diagnostics: Vec::new(),
1419 });
1420 cx.executor().run_until_parked();
1421
1422 project.update(cx, |project, cx| {
1423 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1424 });
1425 let mut fake_server = fake_servers.next().await.unwrap();
1426 let notification = fake_server
1427 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1428 .await
1429 .text_document;
1430 assert_eq!(notification.version, 0);
1431}
1432
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer should send a
    // WorkDoneProgressCancel only for work items marked cancellable.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Start one non-cancellable work item...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable work item.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable item (the one using `progress_token`) should
    // receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1495
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling `enable_language_server` per language should start/stop only
    // that language's server, leaving other languages' servers untouched.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server is a fresh instance and re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1609
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics published against an older document version must be
    // translated through the buffer edits made since that version, both when
    // first received and as the user keeps editing afterwards.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The "\n\n" insertion shifted every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1889
1890#[gpui::test]
1891async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1892 init_test(cx);
1893
1894 let text = concat!(
1895 "let one = ;\n", //
1896 "let two = \n",
1897 "let three = 3;\n",
1898 );
1899
1900 let fs = FakeFs::new(cx.executor());
1901 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1902
1903 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1904 let buffer = project
1905 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1906 .await
1907 .unwrap();
1908
1909 project.update(cx, |project, cx| {
1910 project
1911 .update_buffer_diagnostics(
1912 &buffer,
1913 LanguageServerId(0),
1914 None,
1915 vec![
1916 DiagnosticEntry {
1917 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1918 diagnostic: Diagnostic {
1919 severity: DiagnosticSeverity::ERROR,
1920 message: "syntax error 1".to_string(),
1921 ..Default::default()
1922 },
1923 },
1924 DiagnosticEntry {
1925 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1926 diagnostic: Diagnostic {
1927 severity: DiagnosticSeverity::ERROR,
1928 message: "syntax error 2".to_string(),
1929 ..Default::default()
1930 },
1931 },
1932 ],
1933 cx,
1934 )
1935 .unwrap();
1936 });
1937
1938 // An empty range is extended forward to include the following character.
1939 // At the end of a line, an empty range is extended backward to include
1940 // the preceding character.
1941 buffer.update(cx, |buffer, _| {
1942 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1943 assert_eq!(
1944 chunks
1945 .iter()
1946 .map(|(s, d)| (s.as_str(), *d))
1947 .collect::<Vec<_>>(),
1948 &[
1949 ("let one = ", None),
1950 (";", Some(DiagnosticSeverity::ERROR)),
1951 ("\nlet two =", None),
1952 (" ", Some(DiagnosticSeverity::ERROR)),
1953 ("\nlet three = 3;\n", None)
1954 ]
1955 );
1956 });
1957}
1958
1959#[gpui::test]
1960async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1961 init_test(cx);
1962
1963 let fs = FakeFs::new(cx.executor());
1964 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1965 .await;
1966
1967 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1968
1969 project.update(cx, |project, cx| {
1970 project
1971 .update_diagnostic_entries(
1972 LanguageServerId(0),
1973 Path::new("/dir/a.rs").to_owned(),
1974 None,
1975 vec![DiagnosticEntry {
1976 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1977 diagnostic: Diagnostic {
1978 severity: DiagnosticSeverity::ERROR,
1979 is_primary: true,
1980 message: "syntax error a1".to_string(),
1981 ..Default::default()
1982 },
1983 }],
1984 cx,
1985 )
1986 .unwrap();
1987 project
1988 .update_diagnostic_entries(
1989 LanguageServerId(1),
1990 Path::new("/dir/a.rs").to_owned(),
1991 None,
1992 vec![DiagnosticEntry {
1993 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1994 diagnostic: Diagnostic {
1995 severity: DiagnosticSeverity::ERROR,
1996 is_primary: true,
1997 message: "syntax error b1".to_string(),
1998 ..Default::default()
1999 },
2000 }],
2001 cx,
2002 )
2003 .unwrap();
2004
2005 assert_eq!(
2006 project.diagnostic_summary(false, cx),
2007 DiagnosticSummary {
2008 error_count: 2,
2009 warning_count: 0,
2010 }
2011 );
2012 });
2013}
2014
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // LSP edits computed against an older document version must be
    // transformed through the buffer edits made after that version, so they
    // apply at the right locations in the current text.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Remember the version the server "computed" its edits against.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP ranges below are in coordinates of the *old* version; passing
    // `lsp_document_version` makes the project translate them forward.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits should preserve the user's intervening
    // comments while landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2167
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // A huge rewrite-the-whole-file diff from a language server should be
    // collapsed into the minimal set of buffer edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above collapse to just two minimal buffer edits:
        // rewriting the first import and deleting the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2275
// Verifies that `edits_from_lsp` tolerates malformed server input: edits
// delivered out of order, with an inverted range (start after end), and with
// a range whose end line (99) is far past the end of the file. The result
// must be the same minimized pair of edits as in the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start position comes after end position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends beyond the end of the file (line 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2379
2380fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2381 buffer: &Buffer,
2382 range: Range<T>,
2383) -> Vec<(String, Option<DiagnosticSeverity>)> {
2384 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2385 for chunk in buffer.snapshot().chunks(range, true) {
2386 if chunks.last().map_or(false, |prev_chunk| {
2387 prev_chunk.1 == chunk.diagnostic_severity
2388 }) {
2389 chunks.last_mut().unwrap().0.push_str(chunk.text);
2390 } else {
2391 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2392 }
2393 }
2394 chunks
2395}
2396
// Goto-definition into a file outside the project should open the target in
// an invisible worktree, which is dropped again once the definition handle is
// released. Also checks that no second language server is started for it.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports the definition as living in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs appears as an invisible worktree while the definition is held.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: each worktree's absolute path plus its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2491
// When completion items carry no explicit text-edit range, the completion
// machinery must infer the range to replace from the text around the cursor
// (the word being typed, or the partial string contents).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word ("fqn"); the label differs from the
    // insert_text, and no edit range is provided by the server.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the partially-typed word "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, cursor before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" (after the slash, before the quote).
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2583
// Completion text containing carriage returns ("\r" and "\r\n") must be
// normalized to the buffer's line endings ("\n") before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes bare "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" were normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2644
// Exercises the command-based code-action path: the action resolves to a
// command rather than edits, the command is executed on the server, and the
// server then pushes a `workspace/applyEdit` back to the editor. The applied
// edit must land in the project transaction and be undoable.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server supports lazily resolving code actions.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2777
2778#[gpui::test(iterations = 10)]
2779async fn test_save_file(cx: &mut gpui::TestAppContext) {
2780 init_test(cx);
2781
2782 let fs = FakeFs::new(cx.executor());
2783 fs.insert_tree(
2784 "/dir",
2785 json!({
2786 "file1": "the old contents",
2787 }),
2788 )
2789 .await;
2790
2791 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2792 let buffer = project
2793 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2794 .await
2795 .unwrap();
2796 buffer.update(cx, |buffer, cx| {
2797 assert_eq!(buffer.text(), "the old contents");
2798 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2799 });
2800
2801 project
2802 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2803 .await
2804 .unwrap();
2805
2806 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2807 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2808}
2809
2810#[gpui::test(iterations = 30)]
2811async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2812 init_test(cx);
2813
2814 let fs = FakeFs::new(cx.executor().clone());
2815 fs.insert_tree(
2816 "/dir",
2817 json!({
2818 "file1": "the original contents",
2819 }),
2820 )
2821 .await;
2822
2823 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2824 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2825 let buffer = project
2826 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2827 .await
2828 .unwrap();
2829
2830 // Simulate buffer diffs being slow, so that they don't complete before
2831 // the next file change occurs.
2832 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2833
2834 // Change the buffer's file on disk, and then wait for the file change
2835 // to be detected by the worktree, so that the buffer starts reloading.
2836 fs.save(
2837 "/dir/file1".as_ref(),
2838 &"the first contents".into(),
2839 Default::default(),
2840 )
2841 .await
2842 .unwrap();
2843 worktree.next_event(cx).await;
2844
2845 // Change the buffer's file again. Depending on the random seed, the
2846 // previous file change may still be in progress.
2847 fs.save(
2848 "/dir/file1".as_ref(),
2849 &"the second contents".into(),
2850 Default::default(),
2851 )
2852 .await
2853 .unwrap();
2854 worktree.next_event(cx).await;
2855
2856 cx.executor().run_until_parked();
2857 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2858 buffer.read_with(cx, |buffer, _| {
2859 assert_eq!(buffer.text(), on_disk_text);
2860 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2861 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2862 });
2863}
2864
2865#[gpui::test(iterations = 30)]
2866async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2867 init_test(cx);
2868
2869 let fs = FakeFs::new(cx.executor().clone());
2870 fs.insert_tree(
2871 "/dir",
2872 json!({
2873 "file1": "the original contents",
2874 }),
2875 )
2876 .await;
2877
2878 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2879 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2880 let buffer = project
2881 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2882 .await
2883 .unwrap();
2884
2885 // Simulate buffer diffs being slow, so that they don't complete before
2886 // the next file change occurs.
2887 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2888
2889 // Change the buffer's file on disk, and then wait for the file change
2890 // to be detected by the worktree, so that the buffer starts reloading.
2891 fs.save(
2892 "/dir/file1".as_ref(),
2893 &"the first contents".into(),
2894 Default::default(),
2895 )
2896 .await
2897 .unwrap();
2898 worktree.next_event(cx).await;
2899
2900 cx.executor()
2901 .spawn(cx.executor().simulate_random_delay())
2902 .await;
2903
2904 // Perform a noop edit, causing the buffer's version to increase.
2905 buffer.update(cx, |buffer, cx| {
2906 buffer.edit([(0..0, " ")], None, cx);
2907 buffer.undo(cx);
2908 });
2909
2910 cx.executor().run_until_parked();
2911 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2912 buffer.read_with(cx, |buffer, _| {
2913 let buffer_text = buffer.text();
2914 if buffer_text == on_disk_text {
2915 assert!(
2916 !buffer.is_dirty() && !buffer.has_conflict(),
2917 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2918 );
2919 }
2920 // If the file change occurred while the buffer was processing the first
2921 // change, the buffer will be in a conflicting state.
2922 else {
2923 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2924 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2925 }
2926 });
2927}
2928
2929#[gpui::test]
2930async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2931 init_test(cx);
2932
2933 let fs = FakeFs::new(cx.executor());
2934 fs.insert_tree(
2935 "/dir",
2936 json!({
2937 "file1": "the old contents",
2938 }),
2939 )
2940 .await;
2941
2942 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2943 let buffer = project
2944 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2945 .await
2946 .unwrap();
2947 buffer.update(cx, |buffer, cx| {
2948 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2949 });
2950
2951 project
2952 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2953 .await
2954 .unwrap();
2955
2956 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2957 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2958}
2959
// Saves an untitled buffer to a new path: the buffer should become clean,
// pick up the Rust language from its new extension, and dedupe with a
// subsequent open of the same path.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts as dirty Plain Text once edited.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees().next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After save-as, the buffer is clean and re-detected as Rust.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the newly-saved path must return the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3011
// Uses the real filesystem: renames/deletes files and directories on disk,
// then verifies (1) entry ids and open buffers track their renamed paths,
// (2) a deleted file's buffer is marked deleted, and (3) a remote replica of
// the worktree converges to the same paths after applying streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up a worktree entry's stable id by path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree streams out, for later replay.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            Box::new(CollabRemoteWorktreeClient(project.read(cx).client())),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames (including renames of ancestor directories).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // Only the removed file's buffer is marked deleted.
        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3166
// Renaming a directory must preserve the entry ids of the directory and the
// files inside it, and must not dirty buffers open on those files.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up a worktree entry's stable id by path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b" through the project.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3218
3219#[gpui::test]
3220async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3221 init_test(cx);
3222
3223 let fs = FakeFs::new(cx.executor());
3224 fs.insert_tree(
3225 "/dir",
3226 json!({
3227 "a.txt": "a-contents",
3228 "b.txt": "b-contents",
3229 }),
3230 )
3231 .await;
3232
3233 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3234
3235 // Spawn multiple tasks to open paths, repeating some paths.
3236 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3237 (
3238 p.open_local_buffer("/dir/a.txt", cx),
3239 p.open_local_buffer("/dir/b.txt", cx),
3240 p.open_local_buffer("/dir/a.txt", cx),
3241 )
3242 });
3243
3244 let buffer_a_1 = buffer_a_1.await.unwrap();
3245 let buffer_a_2 = buffer_a_2.await.unwrap();
3246 let buffer_b = buffer_b.await.unwrap();
3247 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3248 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3249
3250 // There is only one buffer per path.
3251 let buffer_a_id = buffer_a_1.entity_id();
3252 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3253
3254 // Open the same path again while it is still open.
3255 drop(buffer_a_1);
3256 let buffer_a_3 = project
3257 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3258 .await
3259 .unwrap();
3260
3261 // There's still only one buffer per path.
3262 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3263}
3264
// Tracks the dirty flag and the exact event sequence through a buffer's
// lifecycle: edit, save, edit again, revert-by-editing, and file deletion
// (both for a clean buffer and an already-dirty one).
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every buffer event except Operation (too noisy to assert on).
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits flips DirtyChanged.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3405
3406#[gpui::test]
3407async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3408 init_test(cx);
3409
3410 let initial_contents = "aaa\nbbbbb\nc\n";
3411 let fs = FakeFs::new(cx.executor());
3412 fs.insert_tree(
3413 "/dir",
3414 json!({
3415 "the-file": initial_contents,
3416 }),
3417 )
3418 .await;
3419 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3420 let buffer = project
3421 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3422 .await
3423 .unwrap();
3424
3425 let anchors = (0..3)
3426 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3427 .collect::<Vec<_>>();
3428
3429 // Change the file on disk, adding two new lines of text, and removing
3430 // one line.
3431 buffer.update(cx, |buffer, _| {
3432 assert!(!buffer.is_dirty());
3433 assert!(!buffer.has_conflict());
3434 });
3435 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3436 fs.save(
3437 "/dir/the-file".as_ref(),
3438 &new_contents.into(),
3439 LineEnding::Unix,
3440 )
3441 .await
3442 .unwrap();
3443
3444 // Because the buffer was not modified, it is reloaded from disk. Its
3445 // contents are edited according to the diff between the old and new
3446 // file contents.
3447 cx.executor().run_until_parked();
3448 buffer.update(cx, |buffer, _| {
3449 assert_eq!(buffer.text(), new_contents);
3450 assert!(!buffer.is_dirty());
3451 assert!(!buffer.has_conflict());
3452
3453 let anchor_positions = anchors
3454 .iter()
3455 .map(|anchor| anchor.to_point(&*buffer))
3456 .collect::<Vec<_>>();
3457 assert_eq!(
3458 anchor_positions,
3459 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3460 );
3461 });
3462
3463 // Modify the buffer
3464 buffer.update(cx, |buffer, cx| {
3465 buffer.edit([(0..0, " ")], None, cx);
3466 assert!(buffer.is_dirty());
3467 assert!(!buffer.has_conflict());
3468 });
3469
3470 // Change the file on disk again, adding blank lines to the beginning.
3471 fs.save(
3472 "/dir/the-file".as_ref(),
3473 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3474 LineEnding::Unix,
3475 )
3476 .await
3477 .unwrap();
3478
3479 // Because the buffer is modified, it doesn't reload from disk, but is
3480 // marked as having a conflict.
3481 cx.executor().run_until_parked();
3482 buffer.update(cx, |buffer, _| {
3483 assert!(buffer.has_conflict());
3484 });
3485}
3486
3487#[gpui::test]
3488async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3489 init_test(cx);
3490
3491 let fs = FakeFs::new(cx.executor());
3492 fs.insert_tree(
3493 "/dir",
3494 json!({
3495 "file1": "a\nb\nc\n",
3496 "file2": "one\r\ntwo\r\nthree\r\n",
3497 }),
3498 )
3499 .await;
3500
3501 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3502 let buffer1 = project
3503 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3504 .await
3505 .unwrap();
3506 let buffer2 = project
3507 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3508 .await
3509 .unwrap();
3510
3511 buffer1.update(cx, |buffer, _| {
3512 assert_eq!(buffer.text(), "a\nb\nc\n");
3513 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3514 });
3515 buffer2.update(cx, |buffer, _| {
3516 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3517 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3518 });
3519
3520 // Change a file's line endings on disk from unix to windows. The buffer's
3521 // state updates correctly.
3522 fs.save(
3523 "/dir/file1".as_ref(),
3524 &"aaa\nb\nc\n".into(),
3525 LineEnding::Windows,
3526 )
3527 .await
3528 .unwrap();
3529 cx.executor().run_until_parked();
3530 buffer1.update(cx, |buffer, _| {
3531 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3532 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3533 });
3534
3535 // Save a file with windows line endings. The file is written correctly.
3536 buffer2.update(cx, |buffer, cx| {
3537 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3538 });
3539 project
3540 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3541 .await
3542 .unwrap();
3543 assert_eq!(
3544 fs.load("/dir/file2".as_ref()).await.unwrap(),
3545 "one\r\ntwo\r\nthree\r\nfour\r\n",
3546 );
3547}
3548
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two logical groups:
    // - "error 1" (warning) plus one supplemental hint;
    // - "error 2" (error) plus two supplemental hints.
    // Primary diagnostics and their hints cross-reference each other through
    // `related_information`, which is what the grouping logic keys off of.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary diagnostic of the "error 1" group.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; points back at the primary via
            // the "original diagnostic" related-information entry.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary diagnostic of the "error 2" group, with two hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics and take an immutable snapshot of the buffer.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position. Each entry records the group it
    // was assigned to ("error 2" became group 0, "error 1" group 1) and
    // whether it is that group's primary diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 can be queried on its own: "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3790
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support,
    // including `textDocument/prepareRename`.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename resolves the range of the symbol under offset 7 (inside
    // "ONE" in `const ONE: usize = 1;`). The fake server answers with the
    // range of the whole identifier.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename applies the multi-file WorkspaceEdit returned by the
    // server: one edit in one.rs (the definition) and two in two.rs (the
    // references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers: the one that was
    // explicitly opened, and the one opened implicitly to apply the edit.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3924
3925#[gpui::test]
3926async fn test_search(cx: &mut gpui::TestAppContext) {
3927 init_test(cx);
3928
3929 let fs = FakeFs::new(cx.executor());
3930 fs.insert_tree(
3931 "/dir",
3932 json!({
3933 "one.rs": "const ONE: usize = 1;",
3934 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3935 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3936 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3937 }),
3938 )
3939 .await;
3940 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3941 assert_eq!(
3942 search(
3943 &project,
3944 SearchQuery::text(
3945 "TWO",
3946 false,
3947 true,
3948 false,
3949 Default::default(),
3950 Default::default()
3951 )
3952 .unwrap(),
3953 cx
3954 )
3955 .await
3956 .unwrap(),
3957 HashMap::from_iter([
3958 ("dir/two.rs".to_string(), vec![6..9]),
3959 ("dir/three.rs".to_string(), vec![37..40])
3960 ])
3961 );
3962
3963 let buffer_4 = project
3964 .update(cx, |project, cx| {
3965 project.open_local_buffer("/dir/four.rs", cx)
3966 })
3967 .await
3968 .unwrap();
3969 buffer_4.update(cx, |buffer, cx| {
3970 let text = "two::TWO";
3971 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3972 });
3973
3974 assert_eq!(
3975 search(
3976 &project,
3977 SearchQuery::text(
3978 "TWO",
3979 false,
3980 true,
3981 false,
3982 Default::default(),
3983 Default::default()
3984 )
3985 .unwrap(),
3986 cx
3987 )
3988 .await
3989 .unwrap(),
3990 HashMap::from_iter([
3991 ("dir/two.rs".to_string(), vec![6..9]),
3992 ("dir/three.rs".to_string(), vec![37..40]),
3993 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3994 ])
3995 );
3996}
3997
3998#[gpui::test]
3999async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4000 init_test(cx);
4001
4002 let search_query = "file";
4003
4004 let fs = FakeFs::new(cx.executor());
4005 fs.insert_tree(
4006 "/dir",
4007 json!({
4008 "one.rs": r#"// Rust file one"#,
4009 "one.ts": r#"// TypeScript file one"#,
4010 "two.rs": r#"// Rust file two"#,
4011 "two.ts": r#"// TypeScript file two"#,
4012 }),
4013 )
4014 .await;
4015 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4016
4017 assert!(
4018 search(
4019 &project,
4020 SearchQuery::text(
4021 search_query,
4022 false,
4023 true,
4024 false,
4025 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4026 Default::default()
4027 )
4028 .unwrap(),
4029 cx
4030 )
4031 .await
4032 .unwrap()
4033 .is_empty(),
4034 "If no inclusions match, no files should be returned"
4035 );
4036
4037 assert_eq!(
4038 search(
4039 &project,
4040 SearchQuery::text(
4041 search_query,
4042 false,
4043 true,
4044 false,
4045 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4046 Default::default()
4047 )
4048 .unwrap(),
4049 cx
4050 )
4051 .await
4052 .unwrap(),
4053 HashMap::from_iter([
4054 ("dir/one.rs".to_string(), vec![8..12]),
4055 ("dir/two.rs".to_string(), vec![8..12]),
4056 ]),
4057 "Rust only search should give only Rust files"
4058 );
4059
4060 assert_eq!(
4061 search(
4062 &project,
4063 SearchQuery::text(
4064 search_query,
4065 false,
4066 true,
4067 false,
4068
4069 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4070
4071 Default::default(),
4072 ).unwrap(),
4073 cx
4074 )
4075 .await
4076 .unwrap(),
4077 HashMap::from_iter([
4078 ("dir/one.ts".to_string(), vec![14..18]),
4079 ("dir/two.ts".to_string(), vec![14..18]),
4080 ]),
4081 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4082 );
4083
4084 assert_eq!(
4085 search(
4086 &project,
4087 SearchQuery::text(
4088 search_query,
4089 false,
4090 true,
4091 false,
4092
4093 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4094
4095 Default::default(),
4096 ).unwrap(),
4097 cx
4098 )
4099 .await
4100 .unwrap(),
4101 HashMap::from_iter([
4102 ("dir/two.ts".to_string(), vec![14..18]),
4103 ("dir/one.rs".to_string(), vec![8..12]),
4104 ("dir/one.ts".to_string(), vec![14..18]),
4105 ("dir/two.rs".to_string(), vec![8..12]),
4106 ]),
4107 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4108 );
4109}
4110
4111#[gpui::test]
4112async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4113 init_test(cx);
4114
4115 let search_query = "file";
4116
4117 let fs = FakeFs::new(cx.executor());
4118 fs.insert_tree(
4119 "/dir",
4120 json!({
4121 "one.rs": r#"// Rust file one"#,
4122 "one.ts": r#"// TypeScript file one"#,
4123 "two.rs": r#"// Rust file two"#,
4124 "two.ts": r#"// TypeScript file two"#,
4125 }),
4126 )
4127 .await;
4128 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4129
4130 assert_eq!(
4131 search(
4132 &project,
4133 SearchQuery::text(
4134 search_query,
4135 false,
4136 true,
4137 false,
4138 Default::default(),
4139 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4140 )
4141 .unwrap(),
4142 cx
4143 )
4144 .await
4145 .unwrap(),
4146 HashMap::from_iter([
4147 ("dir/one.rs".to_string(), vec![8..12]),
4148 ("dir/one.ts".to_string(), vec![14..18]),
4149 ("dir/two.rs".to_string(), vec![8..12]),
4150 ("dir/two.ts".to_string(), vec![14..18]),
4151 ]),
4152 "If no exclusions match, all files should be returned"
4153 );
4154
4155 assert_eq!(
4156 search(
4157 &project,
4158 SearchQuery::text(
4159 search_query,
4160 false,
4161 true,
4162 false,
4163 Default::default(),
4164 PathMatcher::new(&["*.rs".to_owned()]).unwrap()
4165 )
4166 .unwrap(),
4167 cx
4168 )
4169 .await
4170 .unwrap(),
4171 HashMap::from_iter([
4172 ("dir/one.ts".to_string(), vec![14..18]),
4173 ("dir/two.ts".to_string(), vec![14..18]),
4174 ]),
4175 "Rust exclusion search should give only TypeScript files"
4176 );
4177
4178 assert_eq!(
4179 search(
4180 &project,
4181 SearchQuery::text(
4182 search_query,
4183 false,
4184 true,
4185 false,
4186 Default::default(),
4187
4188 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4189
4190 ).unwrap(),
4191 cx
4192 )
4193 .await
4194 .unwrap(),
4195 HashMap::from_iter([
4196 ("dir/one.rs".to_string(), vec![8..12]),
4197 ("dir/two.rs".to_string(), vec![8..12]),
4198 ]),
4199 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4200 );
4201
4202 assert!(
4203 search(
4204 &project,
4205 SearchQuery::text(
4206 search_query,
4207 false,
4208 true,
4209 false,
4210 Default::default(),
4211
4212 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4213
4214 ).unwrap(),
4215 cx
4216 )
4217 .await
4218 .unwrap().is_empty(),
4219 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4220 );
4221}
4222
4223#[gpui::test]
4224async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4225 init_test(cx);
4226
4227 let search_query = "file";
4228
4229 let fs = FakeFs::new(cx.executor());
4230 fs.insert_tree(
4231 "/dir",
4232 json!({
4233 "one.rs": r#"// Rust file one"#,
4234 "one.ts": r#"// TypeScript file one"#,
4235 "two.rs": r#"// Rust file two"#,
4236 "two.ts": r#"// TypeScript file two"#,
4237 }),
4238 )
4239 .await;
4240 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4241
4242 assert!(
4243 search(
4244 &project,
4245 SearchQuery::text(
4246 search_query,
4247 false,
4248 true,
4249 false,
4250 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4251 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4252 )
4253 .unwrap(),
4254 cx
4255 )
4256 .await
4257 .unwrap()
4258 .is_empty(),
4259 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4260 );
4261
4262 assert!(
4263 search(
4264 &project,
4265 SearchQuery::text(
4266 search_query,
4267 false,
4268 true,
4269 false,
4270 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4271 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4272 ).unwrap(),
4273 cx
4274 )
4275 .await
4276 .unwrap()
4277 .is_empty(),
4278 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4279 );
4280
4281 assert!(
4282 search(
4283 &project,
4284 SearchQuery::text(
4285 search_query,
4286 false,
4287 true,
4288 false,
4289 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4290 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4291 )
4292 .unwrap(),
4293 cx
4294 )
4295 .await
4296 .unwrap()
4297 .is_empty(),
4298 "Non-matching inclusions and exclusions should not change that."
4299 );
4300
4301 assert_eq!(
4302 search(
4303 &project,
4304 SearchQuery::text(
4305 search_query,
4306 false,
4307 true,
4308 false,
4309 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4310 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4311 )
4312 .unwrap(),
4313 cx
4314 )
4315 .await
4316 .unwrap(),
4317 HashMap::from_iter([
4318 ("dir/one.ts".to_string(), vec![14..18]),
4319 ("dir/two.ts".to_string(), vec![14..18]),
4320 ]),
4321 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4322 );
4323}
4324
4325#[gpui::test]
4326async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4327 init_test(cx);
4328
4329 let fs = FakeFs::new(cx.executor());
4330 fs.insert_tree(
4331 "/worktree-a",
4332 json!({
4333 "haystack.rs": r#"// NEEDLE"#,
4334 "haystack.ts": r#"// NEEDLE"#,
4335 }),
4336 )
4337 .await;
4338 fs.insert_tree(
4339 "/worktree-b",
4340 json!({
4341 "haystack.rs": r#"// NEEDLE"#,
4342 "haystack.ts": r#"// NEEDLE"#,
4343 }),
4344 )
4345 .await;
4346
4347 let project = Project::test(
4348 fs.clone(),
4349 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4350 cx,
4351 )
4352 .await;
4353
4354 assert_eq!(
4355 search(
4356 &project,
4357 SearchQuery::text(
4358 "NEEDLE",
4359 false,
4360 true,
4361 false,
4362 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4363 Default::default()
4364 )
4365 .unwrap(),
4366 cx
4367 )
4368 .await
4369 .unwrap(),
4370 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4371 "should only return results from included worktree"
4372 );
4373 assert_eq!(
4374 search(
4375 &project,
4376 SearchQuery::text(
4377 "NEEDLE",
4378 false,
4379 true,
4380 false,
4381 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4382 Default::default()
4383 )
4384 .unwrap(),
4385 cx
4386 )
4387 .await
4388 .unwrap(),
4389 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4390 "should only return results from included worktree"
4391 );
4392
4393 assert_eq!(
4394 search(
4395 &project,
4396 SearchQuery::text(
4397 "NEEDLE",
4398 false,
4399 true,
4400 false,
4401 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4402 Default::default()
4403 )
4404 .unwrap(),
4405 cx
4406 )
4407 .await
4408 .unwrap(),
4409 HashMap::from_iter([
4410 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4411 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4412 ]),
4413 "should return results from both worktrees"
4414 );
4415}
4416
4417#[gpui::test]
4418async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4419 init_test(cx);
4420
4421 let fs = FakeFs::new(cx.background_executor.clone());
4422 fs.insert_tree(
4423 "/dir",
4424 json!({
4425 ".git": {},
4426 ".gitignore": "**/target\n/node_modules\n",
4427 "target": {
4428 "index.txt": "index_key:index_value"
4429 },
4430 "node_modules": {
4431 "eslint": {
4432 "index.ts": "const eslint_key = 'eslint value'",
4433 "package.json": r#"{ "some_key": "some value" }"#,
4434 },
4435 "prettier": {
4436 "index.ts": "const prettier_key = 'prettier value'",
4437 "package.json": r#"{ "other_key": "other value" }"#,
4438 },
4439 },
4440 "package.json": r#"{ "main_key": "main value" }"#,
4441 }),
4442 )
4443 .await;
4444 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4445
4446 let query = "key";
4447 assert_eq!(
4448 search(
4449 &project,
4450 SearchQuery::text(
4451 query,
4452 false,
4453 false,
4454 false,
4455 Default::default(),
4456 Default::default()
4457 )
4458 .unwrap(),
4459 cx
4460 )
4461 .await
4462 .unwrap(),
4463 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4464 "Only one non-ignored file should have the query"
4465 );
4466
4467 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4468 assert_eq!(
4469 search(
4470 &project,
4471 SearchQuery::text(
4472 query,
4473 false,
4474 false,
4475 true,
4476 Default::default(),
4477 Default::default()
4478 )
4479 .unwrap(),
4480 cx
4481 )
4482 .await
4483 .unwrap(),
4484 HashMap::from_iter([
4485 ("dir/package.json".to_string(), vec![8..11]),
4486 ("dir/target/index.txt".to_string(), vec![6..9]),
4487 (
4488 "dir/node_modules/prettier/package.json".to_string(),
4489 vec![9..12]
4490 ),
4491 (
4492 "dir/node_modules/prettier/index.ts".to_string(),
4493 vec![15..18]
4494 ),
4495 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4496 (
4497 "dir/node_modules/eslint/package.json".to_string(),
4498 vec![8..11]
4499 ),
4500 ]),
4501 "Unrestricted search with ignored directories should find every file with the query"
4502 );
4503
4504 let files_to_include = PathMatcher::new(&["/dir/node_modules/prettier/**".to_owned()]).unwrap();
4505 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4506 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4507 assert_eq!(
4508 search(
4509 &project,
4510 SearchQuery::text(
4511 query,
4512 false,
4513 false,
4514 true,
4515 files_to_include,
4516 files_to_exclude,
4517 )
4518 .unwrap(),
4519 cx
4520 )
4521 .await
4522 .unwrap(),
4523 HashMap::from_iter([(
4524 "dir/node_modules/prettier/package.json".to_string(),
4525 vec![9..12]
4526 )]),
4527 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4528 );
4529}
4530
#[test]
fn test_glob_literal_prefix() {
    // The literal prefix is the longest run of leading path components that
    // contains no glob metacharacters.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected) in cases {
        assert_eq!(glob_literal_prefix(glob), expected, "prefix of {:?}", glob);
    }
}
4538
// Verifies `Project::create_entry` path validation: a file name that merely
// contains dots ("b..") is created successfully, while any path whose
// components traverse outside the worktree via `..` is rejected — both when
// creating entries and when opening buffers.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Only "/one/two/three" is part of the project; "/one/two/c.rs" sits
    // outside the worktree root.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a plain file name, not a parent-directory reference, so
    // creating it must succeed.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // The filesystem contains exactly the original tree plus "b.."; none of
    // the rejected paths were created.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4608
// Verifies that a hover request fans out to every language server registered
// for the buffer's language that advertises hover capabilities, and that the
// aggregated result contains only the non-empty responses. Four fake servers
// are registered for "tsx": two answer with content, one answers `None`, and
// one has no hover capability at all (and must never be queried).
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // One primary adapter (second argument `true`) plus three secondary
    // adapters (`false`) for the same language, so all four servers start
    // when the single tsx buffer is opened.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This server advertises no hover capability, so the project must not
    // query it for hovers at all.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each fake server as it comes up, keyed by
    // server name so we can later await the requests that should fire.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with actual hover content.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Queried, but returns no hover — must not appear in results.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Not inserted into the map: this request must never fire.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover and then wait until every capable server has received
    // its request before inspecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Results are sorted so the assertion is independent of response order.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4762
// Verifies that hover responses consisting solely of empty or whitespace-only
// parts are filtered out entirely, yielding no hover results.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three hover parts that are all effectively empty: "",
    // whitespace, and bare newlines.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the server actually received the hover request before
    // asserting on the (empty) aggregated result.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
4832
// Mirror of `test_multiple_language_server_hovers` for code actions: a code
// action request fans out to every server registered for the language that
// advertises `code_action_provider`, the `None` response is dropped, and the
// server with no code-action capability must never be queried.
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];
    // One primary adapter (`true`) plus three secondary adapters (`false`)
    // for the same language, so all four servers start for the tsx buffer.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This server advertises no code-action capability, so it must not be
    // asked for code actions at all.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a code-action handler on each fake server as it comes up,
    // keyed by server name so we can await the requests that should fire.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_actions_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with one code action each.
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried, but returns no actions — must not appear in results.
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                // Not inserted into the map: this request must never fire.
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Request code actions for the whole buffer, then wait until every
    // capable server has received its request.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    // Results are sorted so the assertion is independent of response order.
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
4987
// Exercises `Project::move_worktree` across adjacent and non-adjacent moves
// in both directions, asserting the full visible worktree order after each
// step. NOTE(review): the assertions imply `move_worktree(a, b)` places
// worktree `a` at worktree `b`'s position — confirm against the method's own
// documentation if this test is extended.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    // (at this point worktree_a is in position 2 and worktree_b in position 1)
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    // (at this point worktree_a is in position 3 and worktree_b in position 1)
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
5185
5186async fn search(
5187 project: &Model<Project>,
5188 query: SearchQuery,
5189 cx: &mut gpui::TestAppContext,
5190) -> Result<HashMap<String, Vec<Range<usize>>>> {
5191 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5192 let mut results = HashMap::default();
5193 while let Some(search_result) = search_rx.next().await {
5194 match search_result {
5195 SearchResult::Buffer { buffer, ranges } => {
5196 results.entry(buffer).or_insert(ranges);
5197 }
5198 SearchResult::LimitReached => {}
5199 }
5200 }
5201 Ok(results
5202 .into_iter()
5203 .map(|(buffer, ranges)| {
5204 buffer.update(cx, |buffer, cx| {
5205 let path = buffer
5206 .file()
5207 .unwrap()
5208 .full_path(cx)
5209 .to_string_lossy()
5210 .to_string();
5211 let ranges = ranges
5212 .into_iter()
5213 .map(|range| range.to_offset(buffer))
5214 .collect::<Vec<_>>();
5215 (path, ranges)
5216 })
5217 })
5218 .collect())
5219}
5220
/// Shared setup for every test in this file: installs a test settings store
/// and initializes the globals that `Project` depends on. Logging is only
/// enabled when RUST_LOG is set, keeping test output quiet by default.
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        // `try_init` so repeated calls across tests in one process don't panic.
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        // The settings store is registered first; the `init` calls below are
        // presumed to read settings — NOTE(review): confirm before reordering.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
5234
5235fn json_lang() -> Arc<Language> {
5236 Arc::new(Language::new(
5237 LanguageConfig {
5238 name: "JSON".into(),
5239 matcher: LanguageMatcher {
5240 path_suffixes: vec!["json".to_string()],
5241 ..Default::default()
5242 },
5243 ..Default::default()
5244 },
5245 None,
5246 ))
5247}
5248
5249fn js_lang() -> Arc<Language> {
5250 Arc::new(Language::new(
5251 LanguageConfig {
5252 name: Arc::from("JavaScript"),
5253 matcher: LanguageMatcher {
5254 path_suffixes: vec!["js".to_string()],
5255 ..Default::default()
5256 },
5257 ..Default::default()
5258 },
5259 None,
5260 ))
5261}
5262
5263fn rust_lang() -> Arc<Language> {
5264 Arc::new(Language::new(
5265 LanguageConfig {
5266 name: "Rust".into(),
5267 matcher: LanguageMatcher {
5268 path_suffixes: vec!["rs".to_string()],
5269 ..Default::default()
5270 },
5271 ..Default::default()
5272 },
5273 Some(tree_sitter_rust::language()),
5274 ))
5275}
5276
5277fn typescript_lang() -> Arc<Language> {
5278 Arc::new(Language::new(
5279 LanguageConfig {
5280 name: "TypeScript".into(),
5281 matcher: LanguageMatcher {
5282 path_suffixes: vec!["ts".to_string()],
5283 ..Default::default()
5284 },
5285 ..Default::default()
5286 },
5287 Some(tree_sitter_typescript::language_typescript()),
5288 ))
5289}
5290
5291fn tsx_lang() -> Arc<Language> {
5292 Arc::new(Language::new(
5293 LanguageConfig {
5294 name: "tsx".into(),
5295 matcher: LanguageMatcher {
5296 path_suffixes: vec!["tsx".to_string()],
5297 ..Default::default()
5298 },
5299 ..Default::default()
5300 },
5301 Some(tree_sitter_typescript::language_tsx()),
5302 ))
5303}
5304
5305fn get_all_tasks(
5306 project: &Model<Project>,
5307 worktree_id: Option<WorktreeId>,
5308 task_context: &TaskContext,
5309 cx: &mut AppContext,
5310) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5311 let resolved_tasks = project.update(cx, |project, cx| {
5312 project
5313 .task_inventory()
5314 .read(cx)
5315 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5316 });
5317
5318 cx.spawn(|_| async move {
5319 let (mut old, new) = resolved_tasks.await;
5320 old.extend(new);
5321 old
5322 })
5323}