1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::NumberOrString;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/tmp").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
51#[cfg(not(windows))]
52#[gpui::test]
53async fn test_symlinks(cx: &mut gpui::TestAppContext) {
54 init_test(cx);
55 cx.executor().allow_parking();
56
57 let dir = temp_tree(json!({
58 "root": {
59 "apple": "",
60 "banana": {
61 "carrot": {
62 "date": "",
63 "endive": "",
64 }
65 },
66 "fennel": {
67 "grape": "",
68 }
69 }
70 }));
71
72 let root_link_path = dir.path().join("root_link");
73 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
74 os::unix::fs::symlink(
75 &dir.path().join("root/fennel"),
76 &dir.path().join("root/finnochio"),
77 )
78 .unwrap();
79
80 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
81
82 project.update(cx, |project, cx| {
83 let tree = project.worktrees().next().unwrap().read(cx);
84 assert_eq!(tree.file_count(), 5);
85 assert_eq!(
86 tree.inode_for_path("fennel/grape"),
87 tree.inode_for_path("finnochio/grape")
88 );
89 });
90}
91
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Worktree-local `.zed` directories contribute both editor settings and
    // task templates. This test checks that a nested `.zed` directory (under
    // `b/`) overrides the root one for files beneath it, and that a static
    // task source can be removed and replaced at runtime.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let task_context = TaskContext::default();

    // Let the worktree scan finish so both `.zed` directories are loaded.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        })
    });
    // Source kind describing the root-level tasks file; reused below when
    // scheduling and when re-registering a replacement source.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution is path-sensitive: `a/a.rs` should see the
            // root `.zed/settings.json`, while `b/b.rs` should see the nested
            // override.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks files contribute a "cargo check" task; the nested one omits
    // the `--all` argument.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most-recently scheduled, so ordering/identity of
    // task sources can be exercised after the source is replaced below.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Build a replacement task list (extra arg and an env var), serialized as
    // it would appear in a tasks.json file.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    // Swap the root static source for one backed by a channel, then push the
    // new task list through it.
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The root task now reflects the replacement template (extra arg + env);
    // the nested `b/` task is unchanged.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
296
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of the language-server lifecycle: servers start lazily
    // when a buffer of their language is opened, receive open/change/save/close
    // notifications only for matching buffers, follow files across renames
    // (including renames that change the file's language), and reopen their
    // documents after a restart.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers, so we can tell which
    // server configured which buffer. Both advertise save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // (The TOML edit below should produce no notification at all; the next
    // change the Rust server sees is the test2.rs edit.)
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the renamed buffer so we can verify it is cleared
    // when the buffer changes language below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers should receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
679
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // A server's `workspace/didChangeWatchedFiles` registration should cause
    // the worktree to (a) expand gitignored directories covered by a watch
    // glob and (b) forward only the matching FS events to that server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Snapshot the read_dir count so we can measure how many extra directory
    // reads the watch registration triggers below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob over `src`, and a glob
    // that reaches into the gitignored `target/y` subtree.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate received events sorted by URI, so assertions below are
    // independent of delivery order.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
873
874#[gpui::test]
875async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
876 init_test(cx);
877
878 let fs = FakeFs::new(cx.executor());
879 fs.insert_tree(
880 "/dir",
881 json!({
882 "a.rs": "let a = 1;",
883 "b.rs": "let b = 2;"
884 }),
885 )
886 .await;
887
888 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
889
890 let buffer_a = project
891 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
892 .await
893 .unwrap();
894 let buffer_b = project
895 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
896 .await
897 .unwrap();
898
899 project.update(cx, |project, cx| {
900 project
901 .update_diagnostics(
902 LanguageServerId(0),
903 lsp::PublishDiagnosticsParams {
904 uri: Url::from_file_path("/dir/a.rs").unwrap(),
905 version: None,
906 diagnostics: vec![lsp::Diagnostic {
907 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
908 severity: Some(lsp::DiagnosticSeverity::ERROR),
909 message: "error 1".to_string(),
910 ..Default::default()
911 }],
912 },
913 &[],
914 cx,
915 )
916 .unwrap();
917 project
918 .update_diagnostics(
919 LanguageServerId(0),
920 lsp::PublishDiagnosticsParams {
921 uri: Url::from_file_path("/dir/b.rs").unwrap(),
922 version: None,
923 diagnostics: vec![lsp::Diagnostic {
924 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
925 severity: Some(lsp::DiagnosticSeverity::WARNING),
926 message: "error 2".to_string(),
927 ..Default::default()
928 }],
929 },
930 &[],
931 cx,
932 )
933 .unwrap();
934 });
935
936 buffer_a.update(cx, |buffer, _| {
937 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
938 assert_eq!(
939 chunks
940 .iter()
941 .map(|(s, d)| (s.as_str(), *d))
942 .collect::<Vec<_>>(),
943 &[
944 ("let ", None),
945 ("a", Some(DiagnosticSeverity::ERROR)),
946 (" = 1;", None),
947 ]
948 );
949 });
950 buffer_b.update(cx, |buffer, _| {
951 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
952 assert_eq!(
953 chunks
954 .iter()
955 .map(|(s, d)| (s.as_str(), *d))
956 .collect::<Vec<_>>(),
957 &[
958 ("let ", None),
959 ("b", Some(DiagnosticSeverity::WARNING)),
960 (" = 2;", None),
961 ]
962 );
963 });
964}
965
966#[gpui::test]
967async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
968 init_test(cx);
969
970 let fs = FakeFs::new(cx.executor());
971 fs.insert_tree(
972 "/root",
973 json!({
974 "dir": {
975 ".git": {
976 "HEAD": "ref: refs/heads/main",
977 },
978 ".gitignore": "b.rs",
979 "a.rs": "let a = 1;",
980 "b.rs": "let b = 2;",
981 },
982 "other.rs": "let b = c;"
983 }),
984 )
985 .await;
986
987 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
988 let (worktree, _) = project
989 .update(cx, |project, cx| {
990 project.find_or_create_worktree("/root/dir", true, cx)
991 })
992 .await
993 .unwrap();
994 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
995
996 let (worktree, _) = project
997 .update(cx, |project, cx| {
998 project.find_or_create_worktree("/root/other.rs", false, cx)
999 })
1000 .await
1001 .unwrap();
1002 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1003
1004 let server_id = LanguageServerId(0);
1005 project.update(cx, |project, cx| {
1006 project
1007 .update_diagnostics(
1008 server_id,
1009 lsp::PublishDiagnosticsParams {
1010 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1011 version: None,
1012 diagnostics: vec![lsp::Diagnostic {
1013 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1014 severity: Some(lsp::DiagnosticSeverity::ERROR),
1015 message: "unused variable 'b'".to_string(),
1016 ..Default::default()
1017 }],
1018 },
1019 &[],
1020 cx,
1021 )
1022 .unwrap();
1023 project
1024 .update_diagnostics(
1025 server_id,
1026 lsp::PublishDiagnosticsParams {
1027 uri: Url::from_file_path("/root/other.rs").unwrap(),
1028 version: None,
1029 diagnostics: vec![lsp::Diagnostic {
1030 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1031 severity: Some(lsp::DiagnosticSeverity::ERROR),
1032 message: "unknown variable 'c'".to_string(),
1033 ..Default::default()
1034 }],
1035 },
1036 &[],
1037 cx,
1038 )
1039 .unwrap();
1040 });
1041
1042 let main_ignored_buffer = project
1043 .update(cx, |project, cx| {
1044 project.open_buffer((main_worktree_id, "b.rs"), cx)
1045 })
1046 .await
1047 .unwrap();
1048 main_ignored_buffer.update(cx, |buffer, _| {
1049 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1050 assert_eq!(
1051 chunks
1052 .iter()
1053 .map(|(s, d)| (s.as_str(), *d))
1054 .collect::<Vec<_>>(),
1055 &[
1056 ("let ", None),
1057 ("b", Some(DiagnosticSeverity::ERROR)),
1058 (" = 2;", None),
1059 ],
1060 "Gigitnored buffers should still get in-buffer diagnostics",
1061 );
1062 });
1063 let other_buffer = project
1064 .update(cx, |project, cx| {
1065 project.open_buffer((other_worktree_id, ""), cx)
1066 })
1067 .await
1068 .unwrap();
1069 other_buffer.update(cx, |buffer, _| {
1070 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1071 assert_eq!(
1072 chunks
1073 .iter()
1074 .map(|(s, d)| (s.as_str(), *d))
1075 .collect::<Vec<_>>(),
1076 &[
1077 ("let b = ", None),
1078 ("c", Some(DiagnosticSeverity::ERROR)),
1079 (";", None),
1080 ],
1081 "Buffers from hidden projects should still get in-buffer diagnostics"
1082 );
1083 });
1084
1085 project.update(cx, |project, cx| {
1086 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1087 assert_eq!(
1088 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1089 vec![(
1090 ProjectPath {
1091 worktree_id: main_worktree_id,
1092 path: Arc::from(Path::new("b.rs")),
1093 },
1094 server_id,
1095 DiagnosticSummary {
1096 error_count: 1,
1097 warning_count: 0,
1098 }
1099 )]
1100 );
1101 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1102 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1103 });
1104}
1105
// Exercises the disk-based diagnostics lifecycle: LSP progress reports whose
// token matches the adapter's `disk_based_diagnostics_progress_token` should
// be surfaced as DiskBasedDiagnosticsStarted/Finished project events, published
// diagnostics should be recorded (even for unopened buffers) and visible when
// the buffer is later opened, and re-publishing an identical empty diagnostic
// set must not emit a redundant DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe to project events before driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning work under the disk-based progress token must emit
    // DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish an error for `a.rs`, which is not currently open; the project
    // should still record it and emit DiagnosticsUpdated for its path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the token produces the matching Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Opening the buffer after the fact exposes the previously-published
    // diagnostic, translated into buffer (Point) coordinates.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical empty publish must not produce another event:
    // after the executor settles, the event stream is still pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1234
// Restarting a language server while its disk-based diagnostics task is still
// in flight must not leave the project stuck in a "diagnostics running" state:
// only the new server instance is tracked, and ending the new server's progress
// marks diagnostics finished even though the old server never completed.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note the server id advanced to 1: this is a brand-new instance.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1313
// Diagnostics published by a language server must be cleared — both from the
// buffer and from the project's diagnostic summary — when that server is
// restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm the diagnostic reached the
    // buffer and is counted in the project-wide summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1393
1394#[gpui::test]
1395async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1396 init_test(cx);
1397
1398 let fs = FakeFs::new(cx.executor());
1399 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1400
1401 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1402 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1403
1404 language_registry.add(rust_lang());
1405 let mut fake_servers =
1406 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1407
1408 let buffer = project
1409 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1410 .await
1411 .unwrap();
1412
1413 // Before restarting the server, report diagnostics with an unknown buffer version.
1414 let fake_server = fake_servers.next().await.unwrap();
1415 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1416 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1417 version: Some(10000),
1418 diagnostics: Vec::new(),
1419 });
1420 cx.executor().run_until_parked();
1421
1422 project.update(cx, |project, cx| {
1423 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1424 });
1425 let mut fake_server = fake_servers.next().await.unwrap();
1426 let notification = fake_server
1427 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1428 .await
1429 .text_document;
1430 assert_eq!(notification.version, 0);
1431}
1432
// Cancelling language-server work for a buffer should send a
// `window/workDoneProgress/cancel` notification only for progress tokens the
// server marked as cancellable — the non-cancellable token must be skipped.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // This token is explicitly non-cancellable and must NOT receive a cancel.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // This token is cancellable and is the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // The only cancel notification received should target the cancellable token.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1495
// Toggling the per-language `enable_language_server` setting should stop and
// restart exactly the affected server: disabling Rust stops only the Rust
// server, and flipping Rust on while turning JavaScript off starts a fresh
// Rust server and shuts the JavaScript one down.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server exits; the JavaScript server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server is a new instance that re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1609
// Verifies that diagnostics published against an *older* document version are
// translated through the buffer edits made since that version: ranges shift
// with insertions, overlapping diagnostics are layered correctly in chunked
// highlighting, and out-of-order/unsorted diagnostic lists are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The "\n\n" insertion shifted every row down by two.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // The chunked view of the whole buffer carries severities on exactly
        // the diagnosed spans.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the diagnosed spans at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe ERROR wins the
        // chunk; the warning covers the remainder of its own range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    // Despite arriving unsorted, the diagnostics come back in buffer order,
    // shifted by the edits applied after `change_notification_2` was sent.
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1889
1890#[gpui::test]
1891async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1892 init_test(cx);
1893
1894 let text = concat!(
1895 "let one = ;\n", //
1896 "let two = \n",
1897 "let three = 3;\n",
1898 );
1899
1900 let fs = FakeFs::new(cx.executor());
1901 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1902
1903 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1904 let buffer = project
1905 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1906 .await
1907 .unwrap();
1908
1909 project.update(cx, |project, cx| {
1910 project
1911 .update_buffer_diagnostics(
1912 &buffer,
1913 LanguageServerId(0),
1914 None,
1915 vec![
1916 DiagnosticEntry {
1917 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1918 diagnostic: Diagnostic {
1919 severity: DiagnosticSeverity::ERROR,
1920 message: "syntax error 1".to_string(),
1921 ..Default::default()
1922 },
1923 },
1924 DiagnosticEntry {
1925 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1926 diagnostic: Diagnostic {
1927 severity: DiagnosticSeverity::ERROR,
1928 message: "syntax error 2".to_string(),
1929 ..Default::default()
1930 },
1931 },
1932 ],
1933 cx,
1934 )
1935 .unwrap();
1936 });
1937
1938 // An empty range is extended forward to include the following character.
1939 // At the end of a line, an empty range is extended backward to include
1940 // the preceding character.
1941 buffer.update(cx, |buffer, _| {
1942 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1943 assert_eq!(
1944 chunks
1945 .iter()
1946 .map(|(s, d)| (s.as_str(), *d))
1947 .collect::<Vec<_>>(),
1948 &[
1949 ("let one = ", None),
1950 (";", Some(DiagnosticSeverity::ERROR)),
1951 ("\nlet two =", None),
1952 (" ", Some(DiagnosticSeverity::ERROR)),
1953 ("\nlet three = 3;\n", None)
1954 ]
1955 );
1956 });
1957}
1958
1959#[gpui::test]
1960async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1961 init_test(cx);
1962
1963 let fs = FakeFs::new(cx.executor());
1964 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1965 .await;
1966
1967 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1968
1969 project.update(cx, |project, cx| {
1970 project
1971 .update_diagnostic_entries(
1972 LanguageServerId(0),
1973 Path::new("/dir/a.rs").to_owned(),
1974 None,
1975 vec![DiagnosticEntry {
1976 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1977 diagnostic: Diagnostic {
1978 severity: DiagnosticSeverity::ERROR,
1979 is_primary: true,
1980 message: "syntax error a1".to_string(),
1981 ..Default::default()
1982 },
1983 }],
1984 cx,
1985 )
1986 .unwrap();
1987 project
1988 .update_diagnostic_entries(
1989 LanguageServerId(1),
1990 Path::new("/dir/a.rs").to_owned(),
1991 None,
1992 vec![DiagnosticEntry {
1993 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1994 diagnostic: Diagnostic {
1995 severity: DiagnosticSeverity::ERROR,
1996 is_primary: true,
1997 message: "syntax error b1".to_string(),
1998 ..Default::default()
1999 },
2000 }],
2001 cx,
2002 )
2003 .unwrap();
2004
2005 assert_eq!(
2006 project.diagnostic_summary(false, cx),
2007 DiagnosticSummary {
2008 error_count: 2,
2009 warning_count: 0,
2010 }
2011 );
2012 });
2013}
2014
// `edits_from_lsp` must interpret LSP edits against the document *version the
// server saw* and transform them through any buffer edits made since, so that
// applying the results to the current buffer produces the intended text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server observed at open time; the LSP edits
    // below will be anchored to this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP positions below refer to the *original* (pre-edit) text; they
    // must be remapped onto the current buffer contents.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits yields the server's intent merged with
    // the user's intervening edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2167
// When a server expresses a small change as a huge whole-file diff (as
// rust-analyzer does for merge-imports), `edits_from_lsp` should minimize it
// down to the actual differences instead of rewriting the entire buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file diff collapses to just two small edits: the merged
        // use statement and the removal of the now-redundant second import.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2275
// Verifies that `edits_from_lsp` tolerates malformed server input — unordered
// edits, an inverted (end-before-start) range, and a range that extends past
// the end of the file — and still produces the same minimal, valid edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start comes after end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result matches the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2379
2380fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2381 buffer: &Buffer,
2382 range: Range<T>,
2383) -> Vec<(String, Option<DiagnosticSeverity>)> {
2384 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2385 for chunk in buffer.snapshot().chunks(range, true) {
2386 if chunks.last().map_or(false, |prev_chunk| {
2387 prev_chunk.1 == chunk.diagnostic_severity
2388 }) {
2389 chunks.last_mut().unwrap().0.push_str(chunk.text);
2390 } else {
2391 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2392 }
2393 }
2394 chunks
2395}
2396
// Tests go-to-definition across files: the target file is outside the visible
// worktree, so the project must add it as an invisible worktree, reuse the
// existing language server, and release that worktree when the last reference
// to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside the worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as a non-visible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: list each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2491
// Tests completions whose items carry no explicit text edit: the client must
// infer the range to replace from the text around the cursor, and must prefer
// `insert_text` over `label` for the inserted text when it is provided.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word prefix ("fqn").
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item has `insert_text` but no text edit; the client must use
    // `insert_text` (not the label) and compute the range itself.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the "fqn" prefix before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, cursor before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // No `insert_text` here, so the label itself is inserted.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", stopping before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2583
// Verifies that carriage returns in a completion item's `insert_text` are
// normalized to plain newlines before the text reaches the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes bare "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" were normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2644
// Tests the command-based code-action path: when resolving an action yields a
// command instead of edits, the client must execute the command, honor the
// server's resulting `workspace/applyEdit` request, and fold those edits into
// the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server requires a codeAction/resolve round-trip.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server asks the client to insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2777
2778#[gpui::test(iterations = 10)]
2779async fn test_save_file(cx: &mut gpui::TestAppContext) {
2780 init_test(cx);
2781
2782 let fs = FakeFs::new(cx.executor());
2783 fs.insert_tree(
2784 "/dir",
2785 json!({
2786 "file1": "the old contents",
2787 }),
2788 )
2789 .await;
2790
2791 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2792 let buffer = project
2793 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2794 .await
2795 .unwrap();
2796 buffer.update(cx, |buffer, cx| {
2797 assert_eq!(buffer.text(), "the old contents");
2798 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2799 });
2800
2801 project
2802 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2803 .await
2804 .unwrap();
2805
2806 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2807 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2808}
2809
2810#[gpui::test(iterations = 30)]
2811async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2812 init_test(cx);
2813
2814 let fs = FakeFs::new(cx.executor().clone());
2815 fs.insert_tree(
2816 "/dir",
2817 json!({
2818 "file1": "the original contents",
2819 }),
2820 )
2821 .await;
2822
2823 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2824 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2825 let buffer = project
2826 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2827 .await
2828 .unwrap();
2829
2830 // Simulate buffer diffs being slow, so that they don't complete before
2831 // the next file change occurs.
2832 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2833
2834 // Change the buffer's file on disk, and then wait for the file change
2835 // to be detected by the worktree, so that the buffer starts reloading.
2836 fs.save(
2837 "/dir/file1".as_ref(),
2838 &"the first contents".into(),
2839 Default::default(),
2840 )
2841 .await
2842 .unwrap();
2843 worktree.next_event(cx).await;
2844
2845 // Change the buffer's file again. Depending on the random seed, the
2846 // previous file change may still be in progress.
2847 fs.save(
2848 "/dir/file1".as_ref(),
2849 &"the second contents".into(),
2850 Default::default(),
2851 )
2852 .await
2853 .unwrap();
2854 worktree.next_event(cx).await;
2855
2856 cx.executor().run_until_parked();
2857 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2858 buffer.read_with(cx, |buffer, _| {
2859 assert_eq!(buffer.text(), on_disk_text);
2860 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2861 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2862 });
2863}
2864
2865#[gpui::test(iterations = 30)]
2866async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2867 init_test(cx);
2868
2869 let fs = FakeFs::new(cx.executor().clone());
2870 fs.insert_tree(
2871 "/dir",
2872 json!({
2873 "file1": "the original contents",
2874 }),
2875 )
2876 .await;
2877
2878 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2879 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2880 let buffer = project
2881 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2882 .await
2883 .unwrap();
2884
2885 // Simulate buffer diffs being slow, so that they don't complete before
2886 // the next file change occurs.
2887 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2888
2889 // Change the buffer's file on disk, and then wait for the file change
2890 // to be detected by the worktree, so that the buffer starts reloading.
2891 fs.save(
2892 "/dir/file1".as_ref(),
2893 &"the first contents".into(),
2894 Default::default(),
2895 )
2896 .await
2897 .unwrap();
2898 worktree.next_event(cx).await;
2899
2900 cx.executor()
2901 .spawn(cx.executor().simulate_random_delay())
2902 .await;
2903
2904 // Perform a noop edit, causing the buffer's version to increase.
2905 buffer.update(cx, |buffer, cx| {
2906 buffer.edit([(0..0, " ")], None, cx);
2907 buffer.undo(cx);
2908 });
2909
2910 cx.executor().run_until_parked();
2911 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2912 buffer.read_with(cx, |buffer, _| {
2913 let buffer_text = buffer.text();
2914 if buffer_text == on_disk_text {
2915 assert!(
2916 !buffer.is_dirty() && !buffer.has_conflict(),
2917 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2918 );
2919 }
2920 // If the file change occurred while the buffer was processing the first
2921 // change, the buffer will be in a conflicting state.
2922 else {
2923 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2924 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2925 }
2926 });
2927}
2928
2929#[gpui::test]
2930async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2931 init_test(cx);
2932
2933 let fs = FakeFs::new(cx.executor());
2934 fs.insert_tree(
2935 "/dir",
2936 json!({
2937 "file1": "the old contents",
2938 }),
2939 )
2940 .await;
2941
2942 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2943 let buffer = project
2944 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2945 .await
2946 .unwrap();
2947 buffer.update(cx, |buffer, cx| {
2948 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2949 });
2950
2951 project
2952 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2953 .await
2954 .unwrap();
2955
2956 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2957 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2958}
2959
// Tests `save_buffer_as` on an untitled buffer: the buffer gains a file,
// clears its dirty state, is re-detected as Rust from the new extension, and
// is deduplicated with subsequent opens of the same path.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts as Plain Text and becomes dirty on edit.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees().next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Language is re-detected from the ".rs" extension after save-as.
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the saved path yields the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3011
// End-to-end test on a real filesystem: renames and deletions must preserve
// entry ids and keep open buffers pointed at their (possibly moved) files, and
// a remote replica of the worktree must converge after replaying the observed
// update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS + real file watching, so parking the executor is required.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, for later replay.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames, including a rename of a parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files across renames...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while a deleted file keeps its old path but is marked deleted.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3159
// Verifies that renaming a directory preserves entry ids for the directory
// and its children, and leaves buffers open under it clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3211
3212#[gpui::test]
3213async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3214 init_test(cx);
3215
3216 let fs = FakeFs::new(cx.executor());
3217 fs.insert_tree(
3218 "/dir",
3219 json!({
3220 "a.txt": "a-contents",
3221 "b.txt": "b-contents",
3222 }),
3223 )
3224 .await;
3225
3226 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3227
3228 // Spawn multiple tasks to open paths, repeating some paths.
3229 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3230 (
3231 p.open_local_buffer("/dir/a.txt", cx),
3232 p.open_local_buffer("/dir/b.txt", cx),
3233 p.open_local_buffer("/dir/a.txt", cx),
3234 )
3235 });
3236
3237 let buffer_a_1 = buffer_a_1.await.unwrap();
3238 let buffer_a_2 = buffer_a_2.await.unwrap();
3239 let buffer_b = buffer_b.await.unwrap();
3240 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3241 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3242
3243 // There is only one buffer per path.
3244 let buffer_a_id = buffer_a_1.entity_id();
3245 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3246
3247 // Open the same path again while it is still open.
3248 drop(buffer_a_1);
3249 let buffer_a_3 = project
3250 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3251 .await
3252 .unwrap();
3253
3254 // There's still only one buffer per path.
3255 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3256}
3257
3258#[gpui::test]
3259async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3260 init_test(cx);
3261
3262 let fs = FakeFs::new(cx.executor());
3263 fs.insert_tree(
3264 "/dir",
3265 json!({
3266 "file1": "abc",
3267 "file2": "def",
3268 "file3": "ghi",
3269 }),
3270 )
3271 .await;
3272
3273 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3274
3275 let buffer1 = project
3276 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3277 .await
3278 .unwrap();
3279 let events = Arc::new(Mutex::new(Vec::new()));
3280
3281 // initially, the buffer isn't dirty.
3282 buffer1.update(cx, |buffer, cx| {
3283 cx.subscribe(&buffer1, {
3284 let events = events.clone();
3285 move |_, _, event, _| match event {
3286 BufferEvent::Operation(_) => {}
3287 _ => events.lock().push(event.clone()),
3288 }
3289 })
3290 .detach();
3291
3292 assert!(!buffer.is_dirty());
3293 assert!(events.lock().is_empty());
3294
3295 buffer.edit([(1..2, "")], None, cx);
3296 });
3297
3298 // after the first edit, the buffer is dirty, and emits a dirtied event.
3299 buffer1.update(cx, |buffer, cx| {
3300 assert!(buffer.text() == "ac");
3301 assert!(buffer.is_dirty());
3302 assert_eq!(
3303 *events.lock(),
3304 &[language::Event::Edited, language::Event::DirtyChanged]
3305 );
3306 events.lock().clear();
3307 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
3308 });
3309
3310 // after saving, the buffer is not dirty, and emits a saved event.
3311 buffer1.update(cx, |buffer, cx| {
3312 assert!(!buffer.is_dirty());
3313 assert_eq!(*events.lock(), &[language::Event::Saved]);
3314 events.lock().clear();
3315
3316 buffer.edit([(1..1, "B")], None, cx);
3317 buffer.edit([(2..2, "D")], None, cx);
3318 });
3319
3320 // after editing again, the buffer is dirty, and emits another dirty event.
3321 buffer1.update(cx, |buffer, cx| {
3322 assert!(buffer.text() == "aBDc");
3323 assert!(buffer.is_dirty());
3324 assert_eq!(
3325 *events.lock(),
3326 &[
3327 language::Event::Edited,
3328 language::Event::DirtyChanged,
3329 language::Event::Edited,
3330 ],
3331 );
3332 events.lock().clear();
3333
3334 // After restoring the buffer to its previously-saved state,
3335 // the buffer is not considered dirty anymore.
3336 buffer.edit([(1..3, "")], None, cx);
3337 assert!(buffer.text() == "ac");
3338 assert!(!buffer.is_dirty());
3339 });
3340
3341 assert_eq!(
3342 *events.lock(),
3343 &[language::Event::Edited, language::Event::DirtyChanged]
3344 );
3345
3346 // When a file is deleted, the buffer is considered dirty.
3347 let events = Arc::new(Mutex::new(Vec::new()));
3348 let buffer2 = project
3349 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3350 .await
3351 .unwrap();
3352 buffer2.update(cx, |_, cx| {
3353 cx.subscribe(&buffer2, {
3354 let events = events.clone();
3355 move |_, _, event, _| events.lock().push(event.clone())
3356 })
3357 .detach();
3358 });
3359
3360 fs.remove_file("/dir/file2".as_ref(), Default::default())
3361 .await
3362 .unwrap();
3363 cx.executor().run_until_parked();
3364 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3365 assert_eq!(
3366 *events.lock(),
3367 &[
3368 language::Event::DirtyChanged,
3369 language::Event::FileHandleChanged
3370 ]
3371 );
3372
3373 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3374 let events = Arc::new(Mutex::new(Vec::new()));
3375 let buffer3 = project
3376 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3377 .await
3378 .unwrap();
3379 buffer3.update(cx, |_, cx| {
3380 cx.subscribe(&buffer3, {
3381 let events = events.clone();
3382 move |_, _, event, _| events.lock().push(event.clone())
3383 })
3384 .detach();
3385 });
3386
3387 buffer3.update(cx, |buffer, cx| {
3388 buffer.edit([(0..0, "x")], None, cx);
3389 });
3390 events.lock().clear();
3391 fs.remove_file("/dir/file3".as_ref(), Default::default())
3392 .await
3393 .unwrap();
3394 cx.executor().run_until_parked();
3395 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3396 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3397}
3398
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers how an open buffer reacts to its backing file changing on disk:
    // - a clean buffer silently reloads, remapping anchors through a diff of
    //   the old and new contents
    // - a dirty buffer keeps its in-memory edits and is flagged as conflicted
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Drop an anchor at column 1 of each of the three initial lines so we can
    // verify that a reload keeps them attached to their logical positions.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to: "aaa" is now
        // row 1 and "bbbbb" is now row 3; the anchor on the deleted "c" line
        // lands at the closest surviving position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3479
3480#[gpui::test]
3481async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3482 init_test(cx);
3483
3484 let fs = FakeFs::new(cx.executor());
3485 fs.insert_tree(
3486 "/dir",
3487 json!({
3488 "file1": "a\nb\nc\n",
3489 "file2": "one\r\ntwo\r\nthree\r\n",
3490 }),
3491 )
3492 .await;
3493
3494 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3495 let buffer1 = project
3496 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3497 .await
3498 .unwrap();
3499 let buffer2 = project
3500 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3501 .await
3502 .unwrap();
3503
3504 buffer1.update(cx, |buffer, _| {
3505 assert_eq!(buffer.text(), "a\nb\nc\n");
3506 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3507 });
3508 buffer2.update(cx, |buffer, _| {
3509 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3510 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3511 });
3512
3513 // Change a file's line endings on disk from unix to windows. The buffer's
3514 // state updates correctly.
3515 fs.save(
3516 "/dir/file1".as_ref(),
3517 &"aaa\nb\nc\n".into(),
3518 LineEnding::Windows,
3519 )
3520 .await
3521 .unwrap();
3522 cx.executor().run_until_parked();
3523 buffer1.update(cx, |buffer, _| {
3524 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3525 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3526 });
3527
3528 // Save a file with windows line endings. The file is written correctly.
3529 buffer2.update(cx, |buffer, cx| {
3530 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3531 });
3532 project
3533 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3534 .await
3535 .unwrap();
3536 assert_eq!(
3537 fs.load("/dir/file2".as_ref()).await.unwrap(),
3538 "one\r\ntwo\r\nthree\r\nfour\r\n",
3539 );
3540}
3541
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics where primary errors and their supporting
    // hints reference each other via `related_information`, then verifies
    // that the buffer groups them: every entry in a group shares a
    // `group_id`, and exactly one entry per group is primary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Group "error 1": a warning with one related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", published as its own diagnostic that
            // points back at the original via related_information.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Group "error 2": an error with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint for "error 2", pointing back at the original.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint for "error 2", also pointing back at the original.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position. The "error 2" group gets group_id 0
    // and the "error 1" group gets group_id 1; each hint carries its group's
    // id with is_primary == false.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 0 returns the "error 2" primary plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 1 returns the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3783
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the two-step LSP rename flow against a fake server:
    // `prepare_rename` resolves the symbol range under the cursor, then
    // `perform_rename` applies a multi-file WorkspaceEdit.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // (including prepareRename).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"). The fake server answers
    // with the symbol's range, which the project converts to buffer offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE". The fake server returns edits spanning
    // both one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction contains one entry per edited buffer, and
    // both buffers reflect the applied rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3917
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search: results come from files on disk, and unsaved
    // edits in open buffers are reflected in subsequent searches.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // NOTE(review): the positional flags passed to SearchQuery::text appear
    // to be (whole_word, case_sensitive, include_ignored) — confirm against
    // its signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now references
    // two::TWO twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search picks up the unsaved buffer contents of four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
3990
3991#[gpui::test]
3992async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3993 init_test(cx);
3994
3995 let search_query = "file";
3996
3997 let fs = FakeFs::new(cx.executor());
3998 fs.insert_tree(
3999 "/dir",
4000 json!({
4001 "one.rs": r#"// Rust file one"#,
4002 "one.ts": r#"// TypeScript file one"#,
4003 "two.rs": r#"// Rust file two"#,
4004 "two.ts": r#"// TypeScript file two"#,
4005 }),
4006 )
4007 .await;
4008 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4009
4010 assert!(
4011 search(
4012 &project,
4013 SearchQuery::text(
4014 search_query,
4015 false,
4016 true,
4017 false,
4018 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4019 Default::default()
4020 )
4021 .unwrap(),
4022 cx
4023 )
4024 .await
4025 .unwrap()
4026 .is_empty(),
4027 "If no inclusions match, no files should be returned"
4028 );
4029
4030 assert_eq!(
4031 search(
4032 &project,
4033 SearchQuery::text(
4034 search_query,
4035 false,
4036 true,
4037 false,
4038 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4039 Default::default()
4040 )
4041 .unwrap(),
4042 cx
4043 )
4044 .await
4045 .unwrap(),
4046 HashMap::from_iter([
4047 ("dir/one.rs".to_string(), vec![8..12]),
4048 ("dir/two.rs".to_string(), vec![8..12]),
4049 ]),
4050 "Rust only search should give only Rust files"
4051 );
4052
4053 assert_eq!(
4054 search(
4055 &project,
4056 SearchQuery::text(
4057 search_query,
4058 false,
4059 true,
4060 false,
4061
4062 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4063
4064 Default::default(),
4065 ).unwrap(),
4066 cx
4067 )
4068 .await
4069 .unwrap(),
4070 HashMap::from_iter([
4071 ("dir/one.ts".to_string(), vec![14..18]),
4072 ("dir/two.ts".to_string(), vec![14..18]),
4073 ]),
4074 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4075 );
4076
4077 assert_eq!(
4078 search(
4079 &project,
4080 SearchQuery::text(
4081 search_query,
4082 false,
4083 true,
4084 false,
4085
4086 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4087
4088 Default::default(),
4089 ).unwrap(),
4090 cx
4091 )
4092 .await
4093 .unwrap(),
4094 HashMap::from_iter([
4095 ("dir/two.ts".to_string(), vec![14..18]),
4096 ("dir/one.rs".to_string(), vec![8..12]),
4097 ("dir/one.ts".to_string(), vec![14..18]),
4098 ("dir/two.rs".to_string(), vec![8..12]),
4099 ]),
4100 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4101 );
4102}
4103
4104#[gpui::test]
4105async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4106 init_test(cx);
4107
4108 let search_query = "file";
4109
4110 let fs = FakeFs::new(cx.executor());
4111 fs.insert_tree(
4112 "/dir",
4113 json!({
4114 "one.rs": r#"// Rust file one"#,
4115 "one.ts": r#"// TypeScript file one"#,
4116 "two.rs": r#"// Rust file two"#,
4117 "two.ts": r#"// TypeScript file two"#,
4118 }),
4119 )
4120 .await;
4121 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4122
4123 assert_eq!(
4124 search(
4125 &project,
4126 SearchQuery::text(
4127 search_query,
4128 false,
4129 true,
4130 false,
4131 Default::default(),
4132 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4133 )
4134 .unwrap(),
4135 cx
4136 )
4137 .await
4138 .unwrap(),
4139 HashMap::from_iter([
4140 ("dir/one.rs".to_string(), vec![8..12]),
4141 ("dir/one.ts".to_string(), vec![14..18]),
4142 ("dir/two.rs".to_string(), vec![8..12]),
4143 ("dir/two.ts".to_string(), vec![14..18]),
4144 ]),
4145 "If no exclusions match, all files should be returned"
4146 );
4147
4148 assert_eq!(
4149 search(
4150 &project,
4151 SearchQuery::text(
4152 search_query,
4153 false,
4154 true,
4155 false,
4156 Default::default(),
4157 PathMatcher::new(&["*.rs".to_owned()]).unwrap()
4158 )
4159 .unwrap(),
4160 cx
4161 )
4162 .await
4163 .unwrap(),
4164 HashMap::from_iter([
4165 ("dir/one.ts".to_string(), vec![14..18]),
4166 ("dir/two.ts".to_string(), vec![14..18]),
4167 ]),
4168 "Rust exclusion search should give only TypeScript files"
4169 );
4170
4171 assert_eq!(
4172 search(
4173 &project,
4174 SearchQuery::text(
4175 search_query,
4176 false,
4177 true,
4178 false,
4179 Default::default(),
4180
4181 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4182
4183 ).unwrap(),
4184 cx
4185 )
4186 .await
4187 .unwrap(),
4188 HashMap::from_iter([
4189 ("dir/one.rs".to_string(), vec![8..12]),
4190 ("dir/two.rs".to_string(), vec![8..12]),
4191 ]),
4192 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4193 );
4194
4195 assert!(
4196 search(
4197 &project,
4198 SearchQuery::text(
4199 search_query,
4200 false,
4201 true,
4202 false,
4203 Default::default(),
4204
4205 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4206
4207 ).unwrap(),
4208 cx
4209 )
4210 .await
4211 .unwrap().is_empty(),
4212 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4213 );
4214}
4215
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    // When both inclusion and exclusion globs are supplied, exclusions take
    // precedence over inclusions for any file that matches both.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
            ).unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    // Disjoint globs: include TypeScript, exclude Rust — only the included,
    // non-excluded TypeScript files are returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
4317
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // With multiple worktrees open, inclusion globs can target a single
    // worktree by prefixing the glob with the worktree's root name, or span
    // all worktrees with an unprefixed glob.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unprefixed glob matches files in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4409
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // By default, search skips gitignored directories; setting the
    // include-ignored flag searches them too, and inclusion/exclusion globs
    // still apply within ignored directories.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project avoids reusing worktree state from the previous search.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion globs apply on top of the include-ignored flag.
    let files_to_include = PathMatcher::new(&["/dir/node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4523
#[test]
fn test_glob_literal_prefix() {
    // The literal prefix is the leading portion of a glob pattern that
    // contains no glob metacharacters; a fully literal path is its own
    // prefix.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected_prefix) in cases {
        assert_eq!(glob_literal_prefix(glob), expected_prefix);
    }
}
4531
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Entry creation within a worktree: names containing ".." as a literal
    // suffix are allowed, but paths that traverse outside the worktree (or
    // contain ".." components) are rejected, for both create and open.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid file name (".." here is part of the name, not a path
    // component), so creation succeeds.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the successful "b.." creation reached the file system.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4601
// Verifies that a hover request fans out to every running language server
// for the buffer that advertises hover capabilities, that servers without
// hover capabilities are never queried, and that non-empty responses from
// all capable servers are merged into the final result.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Primary adapter for the language (registered with `true`); the stream
    // returned here yields every fake server started for "tsx" files.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Secondary adapter that also supports hover.
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Secondary adapter with hover capabilities that will answer `Ok(None)`.
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Secondary adapter with NO hover capability — it must never receive a
    // hover request.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts all registered servers for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect each started server and install the appropriate hover handler,
    // keyed by server name so duplicate startups are caught.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with a distinct, named hover message.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Capable of hover but answers with no content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Handler that must never fire — it has no hover capability.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then wait until each capable server has actually
    // received its request before inspecting the merged result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4755
4756#[gpui::test]
4757async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4758 init_test(cx);
4759
4760 let fs = FakeFs::new(cx.executor());
4761 fs.insert_tree(
4762 "/dir",
4763 json!({
4764 "a.ts": "a",
4765 }),
4766 )
4767 .await;
4768
4769 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4770
4771 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4772 language_registry.add(typescript_lang());
4773 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4774 "TypeScript",
4775 FakeLspAdapter {
4776 capabilities: lsp::ServerCapabilities {
4777 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4778 ..lsp::ServerCapabilities::default()
4779 },
4780 ..FakeLspAdapter::default()
4781 },
4782 );
4783
4784 let buffer = project
4785 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4786 .await
4787 .unwrap();
4788 cx.executor().run_until_parked();
4789
4790 let fake_server = fake_language_servers
4791 .next()
4792 .await
4793 .expect("failed to get the language server");
4794
4795 let mut request_handled =
4796 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4797 Ok(Some(lsp::Hover {
4798 contents: lsp::HoverContents::Array(vec![
4799 lsp::MarkedString::String("".to_string()),
4800 lsp::MarkedString::String(" ".to_string()),
4801 lsp::MarkedString::String("\n\n\n".to_string()),
4802 ]),
4803 range: None,
4804 }))
4805 });
4806
4807 let hover_task = project.update(cx, |project, cx| {
4808 project.hover(&buffer, Point::new(0, 0), cx)
4809 });
4810 let () = request_handled
4811 .next()
4812 .await
4813 .expect("All hover requests should have been triggered");
4814 assert_eq!(
4815 Vec::<String>::new(),
4816 hover_task
4817 .await
4818 .into_iter()
4819 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4820 .sorted()
4821 .collect::<Vec<_>>(),
4822 "Empty hover parts should be ignored"
4823 );
4824}
4825
4826#[gpui::test]
4827async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4828 init_test(cx);
4829
4830 let fs = FakeFs::new(cx.executor());
4831 fs.insert_tree(
4832 "/dir",
4833 json!({
4834 "a.tsx": "a",
4835 }),
4836 )
4837 .await;
4838
4839 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4840
4841 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4842 language_registry.add(tsx_lang());
4843 let language_server_names = [
4844 "TypeScriptServer",
4845 "TailwindServer",
4846 "ESLintServer",
4847 "NoActionsCapabilitiesServer",
4848 ];
4849 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4850 "tsx",
4851 true,
4852 FakeLspAdapter {
4853 name: &language_server_names[0],
4854 capabilities: lsp::ServerCapabilities {
4855 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4856 ..lsp::ServerCapabilities::default()
4857 },
4858 ..FakeLspAdapter::default()
4859 },
4860 );
4861 let _a = language_registry.register_specific_fake_lsp_adapter(
4862 "tsx",
4863 false,
4864 FakeLspAdapter {
4865 name: &language_server_names[1],
4866 capabilities: lsp::ServerCapabilities {
4867 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4868 ..lsp::ServerCapabilities::default()
4869 },
4870 ..FakeLspAdapter::default()
4871 },
4872 );
4873 let _b = language_registry.register_specific_fake_lsp_adapter(
4874 "tsx",
4875 false,
4876 FakeLspAdapter {
4877 name: &language_server_names[2],
4878 capabilities: lsp::ServerCapabilities {
4879 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4880 ..lsp::ServerCapabilities::default()
4881 },
4882 ..FakeLspAdapter::default()
4883 },
4884 );
4885 let _c = language_registry.register_specific_fake_lsp_adapter(
4886 "tsx",
4887 false,
4888 FakeLspAdapter {
4889 name: &language_server_names[3],
4890 capabilities: lsp::ServerCapabilities {
4891 code_action_provider: None,
4892 ..lsp::ServerCapabilities::default()
4893 },
4894 ..FakeLspAdapter::default()
4895 },
4896 );
4897
4898 let buffer = project
4899 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4900 .await
4901 .unwrap();
4902 cx.executor().run_until_parked();
4903
4904 let mut servers_with_actions_requests = HashMap::default();
4905 for i in 0..language_server_names.len() {
4906 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4907 panic!(
4908 "Failed to get language server #{i} with name {}",
4909 &language_server_names[i]
4910 )
4911 });
4912 let new_server_name = new_server.server.name();
4913 assert!(
4914 !servers_with_actions_requests.contains_key(new_server_name),
4915 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4916 );
4917 let new_server_name = new_server_name.to_string();
4918 match new_server_name.as_str() {
4919 "TailwindServer" | "TypeScriptServer" => {
4920 servers_with_actions_requests.insert(
4921 new_server_name.clone(),
4922 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4923 move |_, _| {
4924 let name = new_server_name.clone();
4925 async move {
4926 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4927 lsp::CodeAction {
4928 title: format!("{name} code action"),
4929 ..lsp::CodeAction::default()
4930 },
4931 )]))
4932 }
4933 },
4934 ),
4935 );
4936 }
4937 "ESLintServer" => {
4938 servers_with_actions_requests.insert(
4939 new_server_name,
4940 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4941 |_, _| async move { Ok(None) },
4942 ),
4943 );
4944 }
4945 "NoActionsCapabilitiesServer" => {
4946 let _never_handled = new_server
4947 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4948 panic!(
4949 "Should not call for code actions server with no corresponding capabilities"
4950 )
4951 });
4952 }
4953 unexpected => panic!("Unexpected server name: {unexpected}"),
4954 }
4955 }
4956
4957 let code_actions_task = project.update(cx, |project, cx| {
4958 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4959 });
4960 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4961 |mut code_actions_request| async move {
4962 code_actions_request
4963 .next()
4964 .await
4965 .expect("All code actions requests should have been triggered")
4966 },
4967 ))
4968 .await;
4969 assert_eq!(
4970 vec!["TailwindServer code action", "TypeScriptServer code action"],
4971 code_actions_task
4972 .await
4973 .into_iter()
4974 .map(|code_action| code_action.lsp_action.title)
4975 .sorted()
4976 .collect::<Vec<_>>(),
4977 "Should receive code actions responses from all related servers with hover capabilities"
4978 );
4979}
4980
4981#[gpui::test]
4982async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
4983 init_test(cx);
4984
4985 let fs = FakeFs::new(cx.executor());
4986 fs.insert_tree(
4987 "/dir",
4988 json!({
4989 "a.rs": "let a = 1;",
4990 "b.rs": "let b = 2;",
4991 "c.rs": "let c = 2;",
4992 }),
4993 )
4994 .await;
4995
4996 let project = Project::test(
4997 fs,
4998 [
4999 "/dir/a.rs".as_ref(),
5000 "/dir/b.rs".as_ref(),
5001 "/dir/c.rs".as_ref(),
5002 ],
5003 cx,
5004 )
5005 .await;
5006
5007 // check the initial state and get the worktrees
5008 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5009 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5010 assert_eq!(worktrees.len(), 3);
5011
5012 let worktree_a = worktrees[0].read(cx);
5013 let worktree_b = worktrees[1].read(cx);
5014 let worktree_c = worktrees[2].read(cx);
5015
5016 // check they start in the right order
5017 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5018 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5019 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5020
5021 (
5022 worktrees[0].clone(),
5023 worktrees[1].clone(),
5024 worktrees[2].clone(),
5025 )
5026 });
5027
5028 // move first worktree to after the second
5029 // [a, b, c] -> [b, a, c]
5030 project
5031 .update(cx, |project, cx| {
5032 let first = worktree_a.read(cx);
5033 let second = worktree_b.read(cx);
5034 project.move_worktree(first.id(), second.id(), cx)
5035 })
5036 .expect("moving first after second");
5037
5038 // check the state after moving
5039 project.update(cx, |project, cx| {
5040 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5041 assert_eq!(worktrees.len(), 3);
5042
5043 let first = worktrees[0].read(cx);
5044 let second = worktrees[1].read(cx);
5045 let third = worktrees[2].read(cx);
5046
5047 // check they are now in the right order
5048 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5049 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5050 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5051 });
5052
5053 // move the second worktree to before the first
5054 // [b, a, c] -> [a, b, c]
5055 project
5056 .update(cx, |project, cx| {
5057 let second = worktree_a.read(cx);
5058 let first = worktree_b.read(cx);
5059 project.move_worktree(first.id(), second.id(), cx)
5060 })
5061 .expect("moving second before first");
5062
5063 // check the state after moving
5064 project.update(cx, |project, cx| {
5065 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5066 assert_eq!(worktrees.len(), 3);
5067
5068 let first = worktrees[0].read(cx);
5069 let second = worktrees[1].read(cx);
5070 let third = worktrees[2].read(cx);
5071
5072 // check they are now in the right order
5073 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5074 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5075 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5076 });
5077
5078 // move the second worktree to after the third
5079 // [a, b, c] -> [a, c, b]
5080 project
5081 .update(cx, |project, cx| {
5082 let second = worktree_b.read(cx);
5083 let third = worktree_c.read(cx);
5084 project.move_worktree(second.id(), third.id(), cx)
5085 })
5086 .expect("moving second after third");
5087
5088 // check the state after moving
5089 project.update(cx, |project, cx| {
5090 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5091 assert_eq!(worktrees.len(), 3);
5092
5093 let first = worktrees[0].read(cx);
5094 let second = worktrees[1].read(cx);
5095 let third = worktrees[2].read(cx);
5096
5097 // check they are now in the right order
5098 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5099 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5100 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5101 });
5102
5103 // move the third worktree to before the second
5104 // [a, c, b] -> [a, b, c]
5105 project
5106 .update(cx, |project, cx| {
5107 let third = worktree_c.read(cx);
5108 let second = worktree_b.read(cx);
5109 project.move_worktree(third.id(), second.id(), cx)
5110 })
5111 .expect("moving third before second");
5112
5113 // check the state after moving
5114 project.update(cx, |project, cx| {
5115 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5116 assert_eq!(worktrees.len(), 3);
5117
5118 let first = worktrees[0].read(cx);
5119 let second = worktrees[1].read(cx);
5120 let third = worktrees[2].read(cx);
5121
5122 // check they are now in the right order
5123 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5124 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5125 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5126 });
5127
5128 // move the first worktree to after the third
5129 // [a, b, c] -> [b, c, a]
5130 project
5131 .update(cx, |project, cx| {
5132 let first = worktree_a.read(cx);
5133 let third = worktree_c.read(cx);
5134 project.move_worktree(first.id(), third.id(), cx)
5135 })
5136 .expect("moving first after third");
5137
5138 // check the state after moving
5139 project.update(cx, |project, cx| {
5140 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5141 assert_eq!(worktrees.len(), 3);
5142
5143 let first = worktrees[0].read(cx);
5144 let second = worktrees[1].read(cx);
5145 let third = worktrees[2].read(cx);
5146
5147 // check they are now in the right order
5148 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5149 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5150 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5151 });
5152
5153 // move the third worktree to before the first
5154 // [b, c, a] -> [a, b, c]
5155 project
5156 .update(cx, |project, cx| {
5157 let third = worktree_a.read(cx);
5158 let first = worktree_b.read(cx);
5159 project.move_worktree(third.id(), first.id(), cx)
5160 })
5161 .expect("moving third before first");
5162
5163 // check the state after moving
5164 project.update(cx, |project, cx| {
5165 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5166 assert_eq!(worktrees.len(), 3);
5167
5168 let first = worktrees[0].read(cx);
5169 let second = worktrees[1].read(cx);
5170 let third = worktrees[2].read(cx);
5171
5172 // check they are now in the right order
5173 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5174 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5175 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5176 });
5177}
5178
5179async fn search(
5180 project: &Model<Project>,
5181 query: SearchQuery,
5182 cx: &mut gpui::TestAppContext,
5183) -> Result<HashMap<String, Vec<Range<usize>>>> {
5184 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5185 let mut results = HashMap::default();
5186 while let Some(search_result) = search_rx.next().await {
5187 match search_result {
5188 SearchResult::Buffer { buffer, ranges } => {
5189 results.entry(buffer).or_insert(ranges);
5190 }
5191 SearchResult::LimitReached => {}
5192 }
5193 }
5194 Ok(results
5195 .into_iter()
5196 .map(|(buffer, ranges)| {
5197 buffer.update(cx, |buffer, cx| {
5198 let path = buffer
5199 .file()
5200 .unwrap()
5201 .full_path(cx)
5202 .to_string_lossy()
5203 .to_string();
5204 let ranges = ranges
5205 .into_iter()
5206 .map(|range| range.to_offset(buffer))
5207 .collect::<Vec<_>>();
5208 (path, ranges)
5209 })
5210 })
5211 .collect())
5212}
5213
5214fn init_test(cx: &mut gpui::TestAppContext) {
5215 if std::env::var("RUST_LOG").is_ok() {
5216 env_logger::try_init().ok();
5217 }
5218
5219 cx.update(|cx| {
5220 let settings_store = SettingsStore::test(cx);
5221 cx.set_global(settings_store);
5222 release_channel::init(SemanticVersion::default(), cx);
5223 language::init(cx);
5224 Project::init_settings(cx);
5225 });
5226}
5227
5228fn json_lang() -> Arc<Language> {
5229 Arc::new(Language::new(
5230 LanguageConfig {
5231 name: "JSON".into(),
5232 matcher: LanguageMatcher {
5233 path_suffixes: vec!["json".to_string()],
5234 ..Default::default()
5235 },
5236 ..Default::default()
5237 },
5238 None,
5239 ))
5240}
5241
5242fn js_lang() -> Arc<Language> {
5243 Arc::new(Language::new(
5244 LanguageConfig {
5245 name: Arc::from("JavaScript"),
5246 matcher: LanguageMatcher {
5247 path_suffixes: vec!["js".to_string()],
5248 ..Default::default()
5249 },
5250 ..Default::default()
5251 },
5252 None,
5253 ))
5254}
5255
5256fn rust_lang() -> Arc<Language> {
5257 Arc::new(Language::new(
5258 LanguageConfig {
5259 name: "Rust".into(),
5260 matcher: LanguageMatcher {
5261 path_suffixes: vec!["rs".to_string()],
5262 ..Default::default()
5263 },
5264 ..Default::default()
5265 },
5266 Some(tree_sitter_rust::language()),
5267 ))
5268}
5269
5270fn typescript_lang() -> Arc<Language> {
5271 Arc::new(Language::new(
5272 LanguageConfig {
5273 name: "TypeScript".into(),
5274 matcher: LanguageMatcher {
5275 path_suffixes: vec!["ts".to_string()],
5276 ..Default::default()
5277 },
5278 ..Default::default()
5279 },
5280 Some(tree_sitter_typescript::language_typescript()),
5281 ))
5282}
5283
5284fn tsx_lang() -> Arc<Language> {
5285 Arc::new(Language::new(
5286 LanguageConfig {
5287 name: "tsx".into(),
5288 matcher: LanguageMatcher {
5289 path_suffixes: vec!["tsx".to_string()],
5290 ..Default::default()
5291 },
5292 ..Default::default()
5293 },
5294 Some(tree_sitter_typescript::language_tsx()),
5295 ))
5296}
5297
5298fn get_all_tasks(
5299 project: &Model<Project>,
5300 worktree_id: Option<WorktreeId>,
5301 task_context: &TaskContext,
5302 cx: &mut AppContext,
5303) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5304 let resolved_tasks = project.update(cx, |project, cx| {
5305 project
5306 .task_inventory()
5307 .read(cx)
5308 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5309 });
5310
5311 cx.spawn(|_| async move {
5312 let (mut old, new) = resolved_tasks.await;
5313 old.extend(new);
5314 old
5315 })
5316}