1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::NumberOrString;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17use std::task::Poll;
18use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
19use unindent::Unindent as _;
20use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
21
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/tmp").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees(cx).next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
// Verifies that per-directory `.zed` settings and task definitions are
// resolved independently for files in different subdirectories, and that a
// worktree task source can be removed and replaced at runtime.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Worktree with a root-level `.zed` directory and a nested `b/.zed`
    // directory, each with its own settings.json and tasks.json.
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the project finish scanning and loading the `.zed` files.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind identifying tasks that come from the root-level tasks.json.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-directory: `a/a.rs` sees the root settings,
            // while `b/b.rs` is overridden by `b/.zed/settings.json`.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks.json files contribute a "cargo check" task, each with its
    // own argument list.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Record the root task as scheduled in the inventory before swapping its
    // source out below.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // New task payload for the replacement source: same label, extra arg and
    // an env var.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    // Replace the file-backed root source with a channel-backed static source
    // whose content this test controls directly.
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    // The root task now reflects the new args/env; the nested `b/` task is
    // unchanged.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
297
// End-to-end lifecycle test for language servers: startup on first matching
// buffer, per-language routing of open/change/save/close notifications,
// buffer re-association on renames that change the file's language, and
// server restarts.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server advertising completion triggers and save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server with a different completion trigger, to tell the two
    // servers' configurations apart in the assertions below.
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it gets no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust server hears about the rust edit; the toml edit produces
    // no change notification to either server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename shows up as a close of the old path followed by
    // an open of the new path on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can observe it being cleared on the
    // language-changing rename below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements
    // are spawned.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (set comparison, since the reopen order is not guaranteed).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
680
// Verifies `workspace/didChangeWatchedFiles` support: registering watchers
// causes ignored directories matching the globs to be loaded, and subsequent
// FS mutations are forwarded to the server only when they match a watcher.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // `target` is gitignored, so it is initially only known as a single
    // unloaded directory entry.
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting how many directory reads the registration causes.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact file path, a brace glob under `src`,
    // and a recursive glob inside the ignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect incoming change notifications, sorted by URI for deterministic
    // assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, but it does trigger
    // directory reads to load the newly watched ignored path.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
874
// Verifies that diagnostics published for two single-file worktrees are
// routed to the correct buffer, by checking the highlighted chunks of each.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server id:
    // an error on `a` in a.rs and a warning on `b` in b.rs.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows exactly its own diagnostic over the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
966
967#[gpui::test]
968async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
969 init_test(cx);
970
971 let fs = FakeFs::new(cx.executor());
972 fs.insert_tree(
973 "/root",
974 json!({
975 "dir": {
976 ".git": {
977 "HEAD": "ref: refs/heads/main",
978 },
979 ".gitignore": "b.rs",
980 "a.rs": "let a = 1;",
981 "b.rs": "let b = 2;",
982 },
983 "other.rs": "let b = c;"
984 }),
985 )
986 .await;
987
988 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
989 let (worktree, _) = project
990 .update(cx, |project, cx| {
991 project.find_or_create_worktree("/root/dir", true, cx)
992 })
993 .await
994 .unwrap();
995 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
996
997 let (worktree, _) = project
998 .update(cx, |project, cx| {
999 project.find_or_create_worktree("/root/other.rs", false, cx)
1000 })
1001 .await
1002 .unwrap();
1003 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1004
1005 let server_id = LanguageServerId(0);
1006 project.update(cx, |project, cx| {
1007 project
1008 .update_diagnostics(
1009 server_id,
1010 lsp::PublishDiagnosticsParams {
1011 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1012 version: None,
1013 diagnostics: vec![lsp::Diagnostic {
1014 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1015 severity: Some(lsp::DiagnosticSeverity::ERROR),
1016 message: "unused variable 'b'".to_string(),
1017 ..Default::default()
1018 }],
1019 },
1020 &[],
1021 cx,
1022 )
1023 .unwrap();
1024 project
1025 .update_diagnostics(
1026 server_id,
1027 lsp::PublishDiagnosticsParams {
1028 uri: Url::from_file_path("/root/other.rs").unwrap(),
1029 version: None,
1030 diagnostics: vec![lsp::Diagnostic {
1031 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1032 severity: Some(lsp::DiagnosticSeverity::ERROR),
1033 message: "unknown variable 'c'".to_string(),
1034 ..Default::default()
1035 }],
1036 },
1037 &[],
1038 cx,
1039 )
1040 .unwrap();
1041 });
1042
1043 let main_ignored_buffer = project
1044 .update(cx, |project, cx| {
1045 project.open_buffer((main_worktree_id, "b.rs"), cx)
1046 })
1047 .await
1048 .unwrap();
1049 main_ignored_buffer.update(cx, |buffer, _| {
1050 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1051 assert_eq!(
1052 chunks
1053 .iter()
1054 .map(|(s, d)| (s.as_str(), *d))
1055 .collect::<Vec<_>>(),
1056 &[
1057 ("let ", None),
1058 ("b", Some(DiagnosticSeverity::ERROR)),
1059 (" = 2;", None),
1060 ],
1061 "Gigitnored buffers should still get in-buffer diagnostics",
1062 );
1063 });
1064 let other_buffer = project
1065 .update(cx, |project, cx| {
1066 project.open_buffer((other_worktree_id, ""), cx)
1067 })
1068 .await
1069 .unwrap();
1070 other_buffer.update(cx, |buffer, _| {
1071 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1072 assert_eq!(
1073 chunks
1074 .iter()
1075 .map(|(s, d)| (s.as_str(), *d))
1076 .collect::<Vec<_>>(),
1077 &[
1078 ("let b = ", None),
1079 ("c", Some(DiagnosticSeverity::ERROR)),
1080 (";", None),
1081 ],
1082 "Buffers from hidden projects should still get in-buffer diagnostics"
1083 );
1084 });
1085
1086 project.update(cx, |project, cx| {
1087 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1088 assert_eq!(
1089 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1090 vec![(
1091 ProjectPath {
1092 worktree_id: main_worktree_id,
1093 path: Arc::from(Path::new("b.rs")),
1094 },
1095 server_id,
1096 DiagnosticSummary {
1097 error_count: 1,
1098 warning_count: 0,
1099 }
1100 )]
1101 );
1102 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1103 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1104 });
1105}
1106
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake adapter registers as its disk-based diagnostics
    // token; progress reported under it should map to the DiskBasedDiagnostics
    // Started/Finished project events.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe to project events before driving the fake server so that no
    // event is missed; the assertions below check exact ordering.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Starting progress under the registered token (token strings of the form
    // "<token>/<n>" match by prefix) surfaces a DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a file emits DiagnosticsUpdated for its project path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the tracked progress yields the matching Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The previously published diagnostic should now be attached to the
    // newly opened buffer's snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical empty publish must be de-duplicated: after the
    // executor settles, no further event is pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1235
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token under which the fake adapter reports disk-based diagnostics progress.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    // Opening the buffer starts the first instance of the language server.
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) should be reported as running
    // disk-based diagnostics; the old server's run must not linger.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1314
1315#[gpui::test]
1316async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1317 init_test(cx);
1318
1319 let fs = FakeFs::new(cx.executor());
1320 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1321
1322 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1323
1324 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1325 language_registry.add(rust_lang());
1326 let mut fake_servers =
1327 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1328
1329 let buffer = project
1330 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1331 .await
1332 .unwrap();
1333
1334 // Publish diagnostics
1335 let fake_server = fake_servers.next().await.unwrap();
1336 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1337 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1338 version: None,
1339 diagnostics: vec![lsp::Diagnostic {
1340 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1341 severity: Some(lsp::DiagnosticSeverity::ERROR),
1342 message: "the message".to_string(),
1343 ..Default::default()
1344 }],
1345 });
1346
1347 cx.executor().run_until_parked();
1348 buffer.update(cx, |buffer, _| {
1349 assert_eq!(
1350 buffer
1351 .snapshot()
1352 .diagnostics_in_range::<_, usize>(0..1, false)
1353 .map(|entry| entry.diagnostic.message.clone())
1354 .collect::<Vec<_>>(),
1355 ["the message".to_string()]
1356 );
1357 });
1358 project.update(cx, |project, cx| {
1359 assert_eq!(
1360 project.diagnostic_summary(false, cx),
1361 DiagnosticSummary {
1362 error_count: 1,
1363 warning_count: 0,
1364 }
1365 );
1366 });
1367
1368 project.update(cx, |project, cx| {
1369 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1370 });
1371
1372 // The diagnostics are cleared.
1373 cx.executor().run_until_parked();
1374 buffer.update(cx, |buffer, _| {
1375 assert_eq!(
1376 buffer
1377 .snapshot()
1378 .diagnostics_in_range::<_, usize>(0..1, false)
1379 .map(|entry| entry.diagnostic.message.clone())
1380 .collect::<Vec<_>>(),
1381 Vec::<String>::new(),
1382 );
1383 });
1384 project.update(cx, |project, cx| {
1385 assert_eq!(
1386 project.diagnostic_summary(false, cx),
1387 DiagnosticSummary {
1388 error_count: 0,
1389 warning_count: 0,
1390 }
1391 );
1392 });
1393}
1394
1395#[gpui::test]
1396async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1397 init_test(cx);
1398
1399 let fs = FakeFs::new(cx.executor());
1400 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1401
1402 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1403 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1404
1405 language_registry.add(rust_lang());
1406 let mut fake_servers =
1407 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1408
1409 let buffer = project
1410 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1411 .await
1412 .unwrap();
1413
1414 // Before restarting the server, report diagnostics with an unknown buffer version.
1415 let fake_server = fake_servers.next().await.unwrap();
1416 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1417 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1418 version: Some(10000),
1419 diagnostics: Vec::new(),
1420 });
1421 cx.executor().run_until_parked();
1422
1423 project.update(cx, |project, cx| {
1424 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1425 });
1426 let mut fake_server = fake_servers.next().await.unwrap();
1427 let notification = fake_server
1428 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1429 .await
1430 .text_document;
1431 assert_eq!(notification.version, 0);
1432}
1433
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token for the progress that is marked cancellable below.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First progress is explicitly non-cancellable; it must NOT receive a
    // cancel notification below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Second progress is cancellable; it is the one the cancel request targets.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1496
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register one fake server per language so we can observe which one
    // starts/stops as the per-language setting is toggled.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is asked to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts and re-opens the buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // ...while the JS server is asked to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1610
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            // "disk"-sourced diagnostics are expected to be flagged is_disk_based below.
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // They were reported against the pre-edit version, so their rows are
    // translated down by the two newlines inserted above.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the narrower error at the same start point.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the more severe (error) chunk wins; the warning
        // covers the remainder of its wider range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Diagnostics reported against the latest version land on the edited
    // positions, sorted by buffer position rather than report order.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1890
1891#[gpui::test]
1892async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1893 init_test(cx);
1894
1895 let text = concat!(
1896 "let one = ;\n", //
1897 "let two = \n",
1898 "let three = 3;\n",
1899 );
1900
1901 let fs = FakeFs::new(cx.executor());
1902 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1903
1904 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1905 let buffer = project
1906 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1907 .await
1908 .unwrap();
1909
1910 project.update(cx, |project, cx| {
1911 project
1912 .update_buffer_diagnostics(
1913 &buffer,
1914 LanguageServerId(0),
1915 None,
1916 vec![
1917 DiagnosticEntry {
1918 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1919 diagnostic: Diagnostic {
1920 severity: DiagnosticSeverity::ERROR,
1921 message: "syntax error 1".to_string(),
1922 ..Default::default()
1923 },
1924 },
1925 DiagnosticEntry {
1926 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1927 diagnostic: Diagnostic {
1928 severity: DiagnosticSeverity::ERROR,
1929 message: "syntax error 2".to_string(),
1930 ..Default::default()
1931 },
1932 },
1933 ],
1934 cx,
1935 )
1936 .unwrap();
1937 });
1938
1939 // An empty range is extended forward to include the following character.
1940 // At the end of a line, an empty range is extended backward to include
1941 // the preceding character.
1942 buffer.update(cx, |buffer, _| {
1943 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1944 assert_eq!(
1945 chunks
1946 .iter()
1947 .map(|(s, d)| (s.as_str(), *d))
1948 .collect::<Vec<_>>(),
1949 &[
1950 ("let one = ", None),
1951 (";", Some(DiagnosticSeverity::ERROR)),
1952 ("\nlet two =", None),
1953 (" ", Some(DiagnosticSeverity::ERROR)),
1954 ("\nlet three = 3;\n", None)
1955 ]
1956 );
1957 });
1958}
1959
1960#[gpui::test]
1961async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1962 init_test(cx);
1963
1964 let fs = FakeFs::new(cx.executor());
1965 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1966 .await;
1967
1968 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1969
1970 project.update(cx, |project, cx| {
1971 project
1972 .update_diagnostic_entries(
1973 LanguageServerId(0),
1974 Path::new("/dir/a.rs").to_owned(),
1975 None,
1976 vec![DiagnosticEntry {
1977 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1978 diagnostic: Diagnostic {
1979 severity: DiagnosticSeverity::ERROR,
1980 is_primary: true,
1981 message: "syntax error a1".to_string(),
1982 ..Default::default()
1983 },
1984 }],
1985 cx,
1986 )
1987 .unwrap();
1988 project
1989 .update_diagnostic_entries(
1990 LanguageServerId(1),
1991 Path::new("/dir/a.rs").to_owned(),
1992 None,
1993 vec![DiagnosticEntry {
1994 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1995 diagnostic: Diagnostic {
1996 severity: DiagnosticSeverity::ERROR,
1997 is_primary: true,
1998 message: "syntax error b1".to_string(),
1999 ..Default::default()
2000 },
2001 }],
2002 cx,
2003 )
2004 .unwrap();
2005
2006 assert_eq!(
2007 project.diagnostic_summary(false, cx),
2008 DiagnosticSummary {
2009 error_count: 2,
2010 warning_count: 0,
2011 }
2012 );
2013 });
2014}
2015
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the version the server saw at open time; the LSP edits below
    // will be expressed against this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate edits that were computed against the old document version;
    // they must be mapped through the buffer edits made since.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits should preserve the user's concurrent
    // edits while incorporating the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2168
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large insert-then-delete diff is coalesced into two minimal
        // edits: the import rewrite and the removal of the duplicated lines.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying the minimal edits produces the merged-imports result.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2276
// `Project::edits_from_lsp` must tolerate malformed server edits: edits sent
// out of order, ranges whose start comes after their end, and end positions
// past the end of the file. They should be normalized/clipped into the same
// minimal, well-formed buffer edits a well-behaved server would produce.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2380
2381fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2382 buffer: &Buffer,
2383 range: Range<T>,
2384) -> Vec<(String, Option<DiagnosticSeverity>)> {
2385 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2386 for chunk in buffer.snapshot().chunks(range, true) {
2387 if chunks.last().map_or(false, |prev_chunk| {
2388 prev_chunk.1 == chunk.diagnostic_severity
2389 }) {
2390 chunks.last_mut().unwrap().0.push_str(chunk.text);
2391 } else {
2392 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2393 }
2394 }
2395 chunks
2396}
2397
// Go-to-definition targeting a file outside the project's worktrees: the
// target buffer is opened via a new *invisible* worktree, which is released
// again once the definition (and its buffer handle) is dropped. Also checks
// that resolving the definition does not spawn a second language server.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not opened.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports that the symbol at offset 22 of `b.rs` is
    // defined at characters 9..10 of `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was added to the project as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's root path along with whether it is visible.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2492
// When the server returns completion items with no `textEdit` (only a label
// and optional `insert_text`), the replacement range must be inferred from
// the text around the cursor: the word prefix being typed, or the contents
// of a partially-typed string literal.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word prefix (`fqn`). The replacement range
    // should cover the three prefix characters.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal. The replacement range should
    // cover the partial path segment (`cmp`) but not the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2584
// Completion `insert_text` containing `\r` or `\r\n` line endings must be
// normalized to `\n` before being offered as the completion's new text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare `\r` and `\r\n` line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2645
// Code actions that carry a command rather than edits: resolving the action
// populates its command, executing the command makes the server send a
// `workspace/applyEdit` request back to the editor, and the edits applied
// that way are captured in the project transaction returned by
// `apply_code_action` (so they can be undone as a unit).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server supports lazily resolving code actions.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of `a.ts`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole command-driven edit undoes as a single transaction.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2778
2779#[gpui::test(iterations = 10)]
2780async fn test_save_file(cx: &mut gpui::TestAppContext) {
2781 init_test(cx);
2782
2783 let fs = FakeFs::new(cx.executor());
2784 fs.insert_tree(
2785 "/dir",
2786 json!({
2787 "file1": "the old contents",
2788 }),
2789 )
2790 .await;
2791
2792 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2793 let buffer = project
2794 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2795 .await
2796 .unwrap();
2797 buffer.update(cx, |buffer, cx| {
2798 assert_eq!(buffer.text(), "the old contents");
2799 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2800 });
2801
2802 project
2803 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2804 .await
2805 .unwrap();
2806
2807 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2808 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2809}
2810
2811#[gpui::test(iterations = 30)]
2812async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2813 init_test(cx);
2814
2815 let fs = FakeFs::new(cx.executor().clone());
2816 fs.insert_tree(
2817 "/dir",
2818 json!({
2819 "file1": "the original contents",
2820 }),
2821 )
2822 .await;
2823
2824 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2825 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2826 let buffer = project
2827 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2828 .await
2829 .unwrap();
2830
2831 // Simulate buffer diffs being slow, so that they don't complete before
2832 // the next file change occurs.
2833 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2834
2835 // Change the buffer's file on disk, and then wait for the file change
2836 // to be detected by the worktree, so that the buffer starts reloading.
2837 fs.save(
2838 "/dir/file1".as_ref(),
2839 &"the first contents".into(),
2840 Default::default(),
2841 )
2842 .await
2843 .unwrap();
2844 worktree.next_event(cx).await;
2845
2846 // Change the buffer's file again. Depending on the random seed, the
2847 // previous file change may still be in progress.
2848 fs.save(
2849 "/dir/file1".as_ref(),
2850 &"the second contents".into(),
2851 Default::default(),
2852 )
2853 .await
2854 .unwrap();
2855 worktree.next_event(cx).await;
2856
2857 cx.executor().run_until_parked();
2858 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2859 buffer.read_with(cx, |buffer, _| {
2860 assert_eq!(buffer.text(), on_disk_text);
2861 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2862 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2863 });
2864}
2865
2866#[gpui::test(iterations = 30)]
2867async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2868 init_test(cx);
2869
2870 let fs = FakeFs::new(cx.executor().clone());
2871 fs.insert_tree(
2872 "/dir",
2873 json!({
2874 "file1": "the original contents",
2875 }),
2876 )
2877 .await;
2878
2879 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2880 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2881 let buffer = project
2882 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2883 .await
2884 .unwrap();
2885
2886 // Simulate buffer diffs being slow, so that they don't complete before
2887 // the next file change occurs.
2888 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2889
2890 // Change the buffer's file on disk, and then wait for the file change
2891 // to be detected by the worktree, so that the buffer starts reloading.
2892 fs.save(
2893 "/dir/file1".as_ref(),
2894 &"the first contents".into(),
2895 Default::default(),
2896 )
2897 .await
2898 .unwrap();
2899 worktree.next_event(cx).await;
2900
2901 cx.executor()
2902 .spawn(cx.executor().simulate_random_delay())
2903 .await;
2904
2905 // Perform a noop edit, causing the buffer's version to increase.
2906 buffer.update(cx, |buffer, cx| {
2907 buffer.edit([(0..0, " ")], None, cx);
2908 buffer.undo(cx);
2909 });
2910
2911 cx.executor().run_until_parked();
2912 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2913 buffer.read_with(cx, |buffer, _| {
2914 let buffer_text = buffer.text();
2915 if buffer_text == on_disk_text {
2916 assert!(
2917 !buffer.is_dirty() && !buffer.has_conflict(),
2918 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2919 );
2920 }
2921 // If the file change occurred while the buffer was processing the first
2922 // change, the buffer will be in a conflicting state.
2923 else {
2924 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2925 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2926 }
2927 });
2928}
2929
2930#[gpui::test]
2931async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2932 init_test(cx);
2933
2934 let fs = FakeFs::new(cx.executor());
2935 fs.insert_tree(
2936 "/dir",
2937 json!({
2938 "file1": "the old contents",
2939 }),
2940 )
2941 .await;
2942
2943 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2944 let buffer = project
2945 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2946 .await
2947 .unwrap();
2948 buffer.update(cx, |buffer, cx| {
2949 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2950 });
2951
2952 project
2953 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2954 .await
2955 .unwrap();
2956
2957 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2958 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2959}
2960
2961#[gpui::test]
2962async fn test_save_as(cx: &mut gpui::TestAppContext) {
2963 init_test(cx);
2964
2965 let fs = FakeFs::new(cx.executor());
2966 fs.insert_tree("/dir", json!({})).await;
2967
2968 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2969
2970 let languages = project.update(cx, |project, _| project.languages().clone());
2971 languages.add(rust_lang());
2972
2973 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
2974 buffer.update(cx, |buffer, cx| {
2975 buffer.edit([(0..0, "abc")], None, cx);
2976 assert!(buffer.is_dirty());
2977 assert!(!buffer.has_conflict());
2978 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2979 });
2980 project
2981 .update(cx, |project, cx| {
2982 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
2983 let path = ProjectPath {
2984 worktree_id,
2985 path: Arc::from(Path::new("file1.rs")),
2986 };
2987 project.save_buffer_as(buffer.clone(), path, cx)
2988 })
2989 .await
2990 .unwrap();
2991 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2992
2993 cx.executor().run_until_parked();
2994 buffer.update(cx, |buffer, cx| {
2995 assert_eq!(
2996 buffer.file().unwrap().full_path(cx),
2997 Path::new("dir/file1.rs")
2998 );
2999 assert!(!buffer.is_dirty());
3000 assert!(!buffer.has_conflict());
3001 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
3002 });
3003
3004 let opened_buffer = project
3005 .update(cx, |project, cx| {
3006 project.open_local_buffer("/dir/file1.rs", cx)
3007 })
3008 .await
3009 .unwrap();
3010 assert_eq!(opened_buffer, buffer);
3011}
3012
// Renames and deletions on the real filesystem are picked up by a rescan:
// entry ids and open buffers follow their files across renames, deleted
// files are flagged on their buffers, and a remote replica of the worktree
// converges to the same state when fed the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay to the remote
    // replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames, including renames of ancestors.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while a deleted file keeps its old path on the buffer.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3160
// Renaming a directory must preserve the entry ids of the directory and the
// files inside it, and leave buffers opened from those files clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for a relative path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory `a` -> `b` through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3212
3213#[gpui::test]
3214async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3215 init_test(cx);
3216
3217 let fs = FakeFs::new(cx.executor());
3218 fs.insert_tree(
3219 "/dir",
3220 json!({
3221 "a.txt": "a-contents",
3222 "b.txt": "b-contents",
3223 }),
3224 )
3225 .await;
3226
3227 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3228
3229 // Spawn multiple tasks to open paths, repeating some paths.
3230 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3231 (
3232 p.open_local_buffer("/dir/a.txt", cx),
3233 p.open_local_buffer("/dir/b.txt", cx),
3234 p.open_local_buffer("/dir/a.txt", cx),
3235 )
3236 });
3237
3238 let buffer_a_1 = buffer_a_1.await.unwrap();
3239 let buffer_a_2 = buffer_a_2.await.unwrap();
3240 let buffer_b = buffer_b.await.unwrap();
3241 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3242 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3243
3244 // There is only one buffer per path.
3245 let buffer_a_id = buffer_a_1.entity_id();
3246 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3247
3248 // Open the same path again while it is still open.
3249 drop(buffer_a_1);
3250 let buffer_a_3 = project
3251 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3252 .await
3253 .unwrap();
3254
3255 // There's still only one buffer per path.
3256 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3257}
3258
// Exercises the buffer dirty-state machine and its events: edits dirty the
// buffer and emit `DirtyChanged`, saving clears it and emits `Saved`,
// restoring the previously-saved text clears dirtiness without a save, and
// deleting the file on disk dirties a clean buffer — but emits no extra
// `DirtyChanged` if the buffer was already dirty.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every buffer event except operations, which are noise here.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Simulate a successful save at the current version.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after a save flips the dirty flag, so only one
        // DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3399
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on
    // disk: a clean buffer silently reloads (preserving anchors via a diff),
    // while a dirty buffer keeps its edits and is flagged as conflicted.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three original lines so we
    // can check that anchors survive the on-disk reload below.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to: "aaa" is
        // now row 1, "bbbbb" is now row 3, and the deleted "c" line's anchor
        // landed at the end of the "bbbbb" line.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3480
3481#[gpui::test]
3482async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3483 init_test(cx);
3484
3485 let fs = FakeFs::new(cx.executor());
3486 fs.insert_tree(
3487 "/dir",
3488 json!({
3489 "file1": "a\nb\nc\n",
3490 "file2": "one\r\ntwo\r\nthree\r\n",
3491 }),
3492 )
3493 .await;
3494
3495 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3496 let buffer1 = project
3497 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3498 .await
3499 .unwrap();
3500 let buffer2 = project
3501 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3502 .await
3503 .unwrap();
3504
3505 buffer1.update(cx, |buffer, _| {
3506 assert_eq!(buffer.text(), "a\nb\nc\n");
3507 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3508 });
3509 buffer2.update(cx, |buffer, _| {
3510 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3511 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3512 });
3513
3514 // Change a file's line endings on disk from unix to windows. The buffer's
3515 // state updates correctly.
3516 fs.save(
3517 "/dir/file1".as_ref(),
3518 &"aaa\nb\nc\n".into(),
3519 LineEnding::Windows,
3520 )
3521 .await
3522 .unwrap();
3523 cx.executor().run_until_parked();
3524 buffer1.update(cx, |buffer, _| {
3525 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3526 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3527 });
3528
3529 // Save a file with windows line endings. The file is written correctly.
3530 buffer2.update(cx, |buffer, cx| {
3531 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3532 });
3533 project
3534 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3535 .await
3536 .unwrap();
3537 assert_eq!(
3538 fs.load("/dir/file2".as_ref()).await.unwrap(),
3539 "one\r\ntwo\r\nthree\r\nfour\r\n",
3540 );
3541}
3542
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics linked via `related_information` are grouped:
    // a standalone HINT that restates a primary diagnostic's related info is
    // folded into the primary's group (sharing its `group_id`, with
    // `is_primary: false`) instead of forming a group of its own.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // One publish containing two logical groups:
    //   * "error 1" (WARNING) plus one supporting hint
    //   * "error 2" (ERROR) plus two supporting hints
    // Each supporting hint also arrives as a standalone HINT diagnostic whose
    // related information points back at the "original diagnostic".
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five diagnostics land in two groups: group 1 ("error 1" + its hint)
    // and group 0 ("error 2" + its two hints), ordered by buffer position.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching a single group returns its primary plus supporting hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3784
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the two-step LSP rename flow: `textDocument/prepareRename`
    // to resolve the symbol range, then `textDocument/rename` whose
    // WorkspaceEdit is applied across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // with prepare_provider, so prepare_rename goes through the server.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the symbol's full range, which the project converts to buffer offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server returns a WorkspaceEdit touching
    // both one.rs (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction records edits to both buffers; check that
    // each one's text was updated accordingly.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3918
3919#[gpui::test]
3920async fn test_search(cx: &mut gpui::TestAppContext) {
3921 init_test(cx);
3922
3923 let fs = FakeFs::new(cx.executor());
3924 fs.insert_tree(
3925 "/dir",
3926 json!({
3927 "one.rs": "const ONE: usize = 1;",
3928 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3929 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3930 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3931 }),
3932 )
3933 .await;
3934 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3935 assert_eq!(
3936 search(
3937 &project,
3938 SearchQuery::text(
3939 "TWO",
3940 false,
3941 true,
3942 false,
3943 Default::default(),
3944 Default::default()
3945 )
3946 .unwrap(),
3947 cx
3948 )
3949 .await
3950 .unwrap(),
3951 HashMap::from_iter([
3952 ("dir/two.rs".to_string(), vec![6..9]),
3953 ("dir/three.rs".to_string(), vec![37..40])
3954 ])
3955 );
3956
3957 let buffer_4 = project
3958 .update(cx, |project, cx| {
3959 project.open_local_buffer("/dir/four.rs", cx)
3960 })
3961 .await
3962 .unwrap();
3963 buffer_4.update(cx, |buffer, cx| {
3964 let text = "two::TWO";
3965 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3966 });
3967
3968 assert_eq!(
3969 search(
3970 &project,
3971 SearchQuery::text(
3972 "TWO",
3973 false,
3974 true,
3975 false,
3976 Default::default(),
3977 Default::default()
3978 )
3979 .unwrap(),
3980 cx
3981 )
3982 .await
3983 .unwrap(),
3984 HashMap::from_iter([
3985 ("dir/two.rs".to_string(), vec![6..9]),
3986 ("dir/three.rs".to_string(), vec![37..40]),
3987 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3988 ])
3989 );
3990}
3991
3992#[gpui::test]
3993async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3994 init_test(cx);
3995
3996 let search_query = "file";
3997
3998 let fs = FakeFs::new(cx.executor());
3999 fs.insert_tree(
4000 "/dir",
4001 json!({
4002 "one.rs": r#"// Rust file one"#,
4003 "one.ts": r#"// TypeScript file one"#,
4004 "two.rs": r#"// Rust file two"#,
4005 "two.ts": r#"// TypeScript file two"#,
4006 }),
4007 )
4008 .await;
4009 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4010
4011 assert!(
4012 search(
4013 &project,
4014 SearchQuery::text(
4015 search_query,
4016 false,
4017 true,
4018 false,
4019 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4020 Default::default()
4021 )
4022 .unwrap(),
4023 cx
4024 )
4025 .await
4026 .unwrap()
4027 .is_empty(),
4028 "If no inclusions match, no files should be returned"
4029 );
4030
4031 assert_eq!(
4032 search(
4033 &project,
4034 SearchQuery::text(
4035 search_query,
4036 false,
4037 true,
4038 false,
4039 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4040 Default::default()
4041 )
4042 .unwrap(),
4043 cx
4044 )
4045 .await
4046 .unwrap(),
4047 HashMap::from_iter([
4048 ("dir/one.rs".to_string(), vec![8..12]),
4049 ("dir/two.rs".to_string(), vec![8..12]),
4050 ]),
4051 "Rust only search should give only Rust files"
4052 );
4053
4054 assert_eq!(
4055 search(
4056 &project,
4057 SearchQuery::text(
4058 search_query,
4059 false,
4060 true,
4061 false,
4062
4063 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4064
4065 Default::default(),
4066 ).unwrap(),
4067 cx
4068 )
4069 .await
4070 .unwrap(),
4071 HashMap::from_iter([
4072 ("dir/one.ts".to_string(), vec![14..18]),
4073 ("dir/two.ts".to_string(), vec![14..18]),
4074 ]),
4075 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4076 );
4077
4078 assert_eq!(
4079 search(
4080 &project,
4081 SearchQuery::text(
4082 search_query,
4083 false,
4084 true,
4085 false,
4086
4087 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4088
4089 Default::default(),
4090 ).unwrap(),
4091 cx
4092 )
4093 .await
4094 .unwrap(),
4095 HashMap::from_iter([
4096 ("dir/two.ts".to_string(), vec![14..18]),
4097 ("dir/one.rs".to_string(), vec![8..12]),
4098 ("dir/one.ts".to_string(), vec![14..18]),
4099 ("dir/two.rs".to_string(), vec![8..12]),
4100 ]),
4101 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4102 );
4103}
4104
4105#[gpui::test]
4106async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4107 init_test(cx);
4108
4109 let search_query = "file";
4110
4111 let fs = FakeFs::new(cx.executor());
4112 fs.insert_tree(
4113 "/dir",
4114 json!({
4115 "one.rs": r#"// Rust file one"#,
4116 "one.ts": r#"// TypeScript file one"#,
4117 "two.rs": r#"// Rust file two"#,
4118 "two.ts": r#"// TypeScript file two"#,
4119 }),
4120 )
4121 .await;
4122 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4123
4124 assert_eq!(
4125 search(
4126 &project,
4127 SearchQuery::text(
4128 search_query,
4129 false,
4130 true,
4131 false,
4132 Default::default(),
4133 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4134 )
4135 .unwrap(),
4136 cx
4137 )
4138 .await
4139 .unwrap(),
4140 HashMap::from_iter([
4141 ("dir/one.rs".to_string(), vec![8..12]),
4142 ("dir/one.ts".to_string(), vec![14..18]),
4143 ("dir/two.rs".to_string(), vec![8..12]),
4144 ("dir/two.ts".to_string(), vec![14..18]),
4145 ]),
4146 "If no exclusions match, all files should be returned"
4147 );
4148
4149 assert_eq!(
4150 search(
4151 &project,
4152 SearchQuery::text(
4153 search_query,
4154 false,
4155 true,
4156 false,
4157 Default::default(),
4158 PathMatcher::new(&["*.rs".to_owned()]).unwrap()
4159 )
4160 .unwrap(),
4161 cx
4162 )
4163 .await
4164 .unwrap(),
4165 HashMap::from_iter([
4166 ("dir/one.ts".to_string(), vec![14..18]),
4167 ("dir/two.ts".to_string(), vec![14..18]),
4168 ]),
4169 "Rust exclusion search should give only TypeScript files"
4170 );
4171
4172 assert_eq!(
4173 search(
4174 &project,
4175 SearchQuery::text(
4176 search_query,
4177 false,
4178 true,
4179 false,
4180 Default::default(),
4181
4182 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4183
4184 ).unwrap(),
4185 cx
4186 )
4187 .await
4188 .unwrap(),
4189 HashMap::from_iter([
4190 ("dir/one.rs".to_string(), vec![8..12]),
4191 ("dir/two.rs".to_string(), vec![8..12]),
4192 ]),
4193 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4194 );
4195
4196 assert!(
4197 search(
4198 &project,
4199 SearchQuery::text(
4200 search_query,
4201 false,
4202 true,
4203 false,
4204 Default::default(),
4205
4206 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4207
4208 ).unwrap(),
4209 cx
4210 )
4211 .await
4212 .unwrap().is_empty(),
4213 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4214 );
4215}
4216
4217#[gpui::test]
4218async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4219 init_test(cx);
4220
4221 let search_query = "file";
4222
4223 let fs = FakeFs::new(cx.executor());
4224 fs.insert_tree(
4225 "/dir",
4226 json!({
4227 "one.rs": r#"// Rust file one"#,
4228 "one.ts": r#"// TypeScript file one"#,
4229 "two.rs": r#"// Rust file two"#,
4230 "two.ts": r#"// TypeScript file two"#,
4231 }),
4232 )
4233 .await;
4234 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4235
4236 assert!(
4237 search(
4238 &project,
4239 SearchQuery::text(
4240 search_query,
4241 false,
4242 true,
4243 false,
4244 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4245 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4246 )
4247 .unwrap(),
4248 cx
4249 )
4250 .await
4251 .unwrap()
4252 .is_empty(),
4253 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4254 );
4255
4256 assert!(
4257 search(
4258 &project,
4259 SearchQuery::text(
4260 search_query,
4261 false,
4262 true,
4263 false,
4264 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4265 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4266 ).unwrap(),
4267 cx
4268 )
4269 .await
4270 .unwrap()
4271 .is_empty(),
4272 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4273 );
4274
4275 assert!(
4276 search(
4277 &project,
4278 SearchQuery::text(
4279 search_query,
4280 false,
4281 true,
4282 false,
4283 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4284 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4285 )
4286 .unwrap(),
4287 cx
4288 )
4289 .await
4290 .unwrap()
4291 .is_empty(),
4292 "Non-matching inclusions and exclusions should not change that."
4293 );
4294
4295 assert_eq!(
4296 search(
4297 &project,
4298 SearchQuery::text(
4299 search_query,
4300 false,
4301 true,
4302 false,
4303 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4304 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4305 )
4306 .unwrap(),
4307 cx
4308 )
4309 .await
4310 .unwrap(),
4311 HashMap::from_iter([
4312 ("dir/one.ts".to_string(), vec![14..18]),
4313 ("dir/two.ts".to_string(), vec![14..18]),
4314 ]),
4315 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4316 );
4317}
4318
4319#[gpui::test]
4320async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4321 init_test(cx);
4322
4323 let fs = FakeFs::new(cx.executor());
4324 fs.insert_tree(
4325 "/worktree-a",
4326 json!({
4327 "haystack.rs": r#"// NEEDLE"#,
4328 "haystack.ts": r#"// NEEDLE"#,
4329 }),
4330 )
4331 .await;
4332 fs.insert_tree(
4333 "/worktree-b",
4334 json!({
4335 "haystack.rs": r#"// NEEDLE"#,
4336 "haystack.ts": r#"// NEEDLE"#,
4337 }),
4338 )
4339 .await;
4340
4341 let project = Project::test(
4342 fs.clone(),
4343 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4344 cx,
4345 )
4346 .await;
4347
4348 assert_eq!(
4349 search(
4350 &project,
4351 SearchQuery::text(
4352 "NEEDLE",
4353 false,
4354 true,
4355 false,
4356 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4357 Default::default()
4358 )
4359 .unwrap(),
4360 cx
4361 )
4362 .await
4363 .unwrap(),
4364 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4365 "should only return results from included worktree"
4366 );
4367 assert_eq!(
4368 search(
4369 &project,
4370 SearchQuery::text(
4371 "NEEDLE",
4372 false,
4373 true,
4374 false,
4375 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4376 Default::default()
4377 )
4378 .unwrap(),
4379 cx
4380 )
4381 .await
4382 .unwrap(),
4383 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4384 "should only return results from included worktree"
4385 );
4386
4387 assert_eq!(
4388 search(
4389 &project,
4390 SearchQuery::text(
4391 "NEEDLE",
4392 false,
4393 true,
4394 false,
4395 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4396 Default::default()
4397 )
4398 .unwrap(),
4399 cx
4400 )
4401 .await
4402 .unwrap(),
4403 HashMap::from_iter([
4404 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4405 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4406 ]),
4407 "should return results from both worktrees"
4408 );
4409}
4410
4411#[gpui::test]
4412async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4413 init_test(cx);
4414
4415 let fs = FakeFs::new(cx.background_executor.clone());
4416 fs.insert_tree(
4417 "/dir",
4418 json!({
4419 ".git": {},
4420 ".gitignore": "**/target\n/node_modules\n",
4421 "target": {
4422 "index.txt": "index_key:index_value"
4423 },
4424 "node_modules": {
4425 "eslint": {
4426 "index.ts": "const eslint_key = 'eslint value'",
4427 "package.json": r#"{ "some_key": "some value" }"#,
4428 },
4429 "prettier": {
4430 "index.ts": "const prettier_key = 'prettier value'",
4431 "package.json": r#"{ "other_key": "other value" }"#,
4432 },
4433 },
4434 "package.json": r#"{ "main_key": "main value" }"#,
4435 }),
4436 )
4437 .await;
4438 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4439
4440 let query = "key";
4441 assert_eq!(
4442 search(
4443 &project,
4444 SearchQuery::text(
4445 query,
4446 false,
4447 false,
4448 false,
4449 Default::default(),
4450 Default::default()
4451 )
4452 .unwrap(),
4453 cx
4454 )
4455 .await
4456 .unwrap(),
4457 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4458 "Only one non-ignored file should have the query"
4459 );
4460
4461 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4462 assert_eq!(
4463 search(
4464 &project,
4465 SearchQuery::text(
4466 query,
4467 false,
4468 false,
4469 true,
4470 Default::default(),
4471 Default::default()
4472 )
4473 .unwrap(),
4474 cx
4475 )
4476 .await
4477 .unwrap(),
4478 HashMap::from_iter([
4479 ("dir/package.json".to_string(), vec![8..11]),
4480 ("dir/target/index.txt".to_string(), vec![6..9]),
4481 (
4482 "dir/node_modules/prettier/package.json".to_string(),
4483 vec![9..12]
4484 ),
4485 (
4486 "dir/node_modules/prettier/index.ts".to_string(),
4487 vec![15..18]
4488 ),
4489 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4490 (
4491 "dir/node_modules/eslint/package.json".to_string(),
4492 vec![8..11]
4493 ),
4494 ]),
4495 "Unrestricted search with ignored directories should find every file with the query"
4496 );
4497
4498 let files_to_include = PathMatcher::new(&["/dir/node_modules/prettier/**".to_owned()]).unwrap();
4499 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4500 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4501 assert_eq!(
4502 search(
4503 &project,
4504 SearchQuery::text(
4505 query,
4506 false,
4507 false,
4508 true,
4509 files_to_include,
4510 files_to_exclude,
4511 )
4512 .unwrap(),
4513 cx
4514 )
4515 .await
4516 .unwrap(),
4517 HashMap::from_iter([(
4518 "dir/node_modules/prettier/package.json".to_string(),
4519 vec![9..12]
4520 )]),
4521 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4522 );
4523}
4524
#[test]
fn test_glob_literal_prefix() {
    // The literal prefix of a glob is the leading portion containing no
    // glob metacharacters; a fully-literal pattern is its own prefix.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected_prefix) in cases {
        assert_eq!(glob_literal_prefix(glob), expected_prefix);
    }
}
4532
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies path validation for entry creation: names containing ".."
    // as a component are rejected (they would escape the worktree), while
    // a name that merely ends in dots (like "b..") is allowed.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is "three"; "c.rs" lives outside of it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid file name — trailing dots are not a traversal.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was created; neither rejected path left anything on disk.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4602
// Verifies that `Project::hover` fans a hover request out to every language
// server registered for the buffer's language that advertises hover
// capability, and aggregates the non-empty responses:
// - TypeScriptServer and TailwindServer both answer with hover content, and
//   both answers are returned;
// - ESLintServer has the capability but answers `None`, so it is queried but
//   contributes nothing;
// - NoHoverCapabilitiesServer lacks the capability and must never be queried.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // One primary adapter (`true`) plus three secondary ones (`false`), all
    // attached to the "tsx" language. All fake servers surface through the
    // same stream returned here.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This adapter advertises no hover capability at all.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts all four servers for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by server name,
    // so we can later confirm which servers actually received the request.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with a distinctive "<name> hover" string.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Queried, but returns no hover content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Not tracked in the map: this handler must never run.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then wait until each capable server has received
    // exactly one request before inspecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4756
// Verifies that hover responses made up entirely of empty or whitespace-only
// parts are discarded: the project-level hover result must be empty rather
// than containing blank entries.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Every part of the hover response is blank in some way: empty string,
    // spaces only, or newlines only.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    // Start the hover, then wait for the server to have seen the request
    // before asserting on the aggregated (empty) result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
4826
// Verifies that `Project::code_actions` fans the request out to every
// language server for the buffer's language that advertises code-action
// capability, and aggregates the results:
// - TypeScriptServer and TailwindServer both return an action, and both
//   actions are surfaced;
// - ESLintServer has the capability but returns `None`, so it is queried but
//   contributes nothing;
// - NoActionsCapabilitiesServer lacks the capability and must not be queried.
// Mirrors `test_multiple_language_server_hovers` for the code-action path.
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];
    // One primary adapter (`true`) plus three secondary ones (`false`), all
    // attached to the "tsx" language.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This adapter advertises no code-action capability at all.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts all four servers for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a code-action handler on each started server, keyed by server
    // name, to later confirm which servers actually received the request.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_actions_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with a distinctive "<name> code action".
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried, but returns no actions.
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                // Not tracked in the map: this handler must never run.
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Request actions for the whole buffer, then wait until each capable
    // server has received exactly one request before asserting.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
4981
// Exercises `Project::move_worktree` through a sequence of reorderings,
// verifying the visible-worktree order after each move.
//
// Naming convention used below: `worktree_a/b/c` always refer to the same
// worktree *identities* (rooted at a.rs/b.rs/c.rs), while locals like
// `first`/`second`/`third` inside each step refer to *positions* in the
// current ordering at that point — e.g. after [a, b, c] -> [b, a, c],
// `worktree_a` is positionally second.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Three single-file worktrees, one per source file.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    // (positionally: worktree_a is currently second, worktree_b first)
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    // (positionally: worktree_a is currently third, worktree_b first)
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
5179
5180async fn search(
5181 project: &Model<Project>,
5182 query: SearchQuery,
5183 cx: &mut gpui::TestAppContext,
5184) -> Result<HashMap<String, Vec<Range<usize>>>> {
5185 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5186 let mut results = HashMap::default();
5187 while let Some(search_result) = search_rx.next().await {
5188 match search_result {
5189 SearchResult::Buffer { buffer, ranges } => {
5190 results.entry(buffer).or_insert(ranges);
5191 }
5192 SearchResult::LimitReached => {}
5193 }
5194 }
5195 Ok(results
5196 .into_iter()
5197 .map(|(buffer, ranges)| {
5198 buffer.update(cx, |buffer, cx| {
5199 let path = buffer
5200 .file()
5201 .unwrap()
5202 .full_path(cx)
5203 .to_string_lossy()
5204 .to_string();
5205 let ranges = ranges
5206 .into_iter()
5207 .map(|range| range.to_offset(buffer))
5208 .collect::<Vec<_>>();
5209 (path, ranges)
5210 })
5211 })
5212 .collect())
5213}
5214
5215fn init_test(cx: &mut gpui::TestAppContext) {
5216 if std::env::var("RUST_LOG").is_ok() {
5217 env_logger::try_init().ok();
5218 }
5219
5220 cx.update(|cx| {
5221 let settings_store = SettingsStore::test(cx);
5222 cx.set_global(settings_store);
5223 release_channel::init(SemanticVersion::default(), cx);
5224 language::init(cx);
5225 Project::init_settings(cx);
5226 });
5227}
5228
5229fn json_lang() -> Arc<Language> {
5230 Arc::new(Language::new(
5231 LanguageConfig {
5232 name: "JSON".into(),
5233 matcher: LanguageMatcher {
5234 path_suffixes: vec!["json".to_string()],
5235 ..Default::default()
5236 },
5237 ..Default::default()
5238 },
5239 None,
5240 ))
5241}
5242
5243fn js_lang() -> Arc<Language> {
5244 Arc::new(Language::new(
5245 LanguageConfig {
5246 name: Arc::from("JavaScript"),
5247 matcher: LanguageMatcher {
5248 path_suffixes: vec!["js".to_string()],
5249 ..Default::default()
5250 },
5251 ..Default::default()
5252 },
5253 None,
5254 ))
5255}
5256
5257fn rust_lang() -> Arc<Language> {
5258 Arc::new(Language::new(
5259 LanguageConfig {
5260 name: "Rust".into(),
5261 matcher: LanguageMatcher {
5262 path_suffixes: vec!["rs".to_string()],
5263 ..Default::default()
5264 },
5265 ..Default::default()
5266 },
5267 Some(tree_sitter_rust::language()),
5268 ))
5269}
5270
5271fn typescript_lang() -> Arc<Language> {
5272 Arc::new(Language::new(
5273 LanguageConfig {
5274 name: "TypeScript".into(),
5275 matcher: LanguageMatcher {
5276 path_suffixes: vec!["ts".to_string()],
5277 ..Default::default()
5278 },
5279 ..Default::default()
5280 },
5281 Some(tree_sitter_typescript::language_typescript()),
5282 ))
5283}
5284
5285fn tsx_lang() -> Arc<Language> {
5286 Arc::new(Language::new(
5287 LanguageConfig {
5288 name: "tsx".into(),
5289 matcher: LanguageMatcher {
5290 path_suffixes: vec!["tsx".to_string()],
5291 ..Default::default()
5292 },
5293 ..Default::default()
5294 },
5295 Some(tree_sitter_typescript::language_tsx()),
5296 ))
5297}
5298
5299fn get_all_tasks(
5300 project: &Model<Project>,
5301 worktree_id: Option<WorktreeId>,
5302 task_context: &TaskContext,
5303 cx: &mut AppContext,
5304) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5305 let resolved_tasks = project.update(cx, |project, cx| {
5306 project
5307 .task_inventory()
5308 .read(cx)
5309 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5310 });
5311
5312 cx.spawn(|_| async move {
5313 let (mut old, new) = resolved_tasks.await;
5314 old.extend(new);
5315 old
5316 })
5317}