1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::NumberOrString;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17use std::task::Poll;
18use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
19use unindent::Unindent as _;
20use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
21
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/tmp").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees(cx).next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
// Verifies that per-directory `.zed` configuration is honored: nested
// `settings.json` files override the worktree-level ones, nested `tasks.json`
// files contribute worktree tasks, and a static task source can be swapped
// out at runtime via a channel-backed `TrackedFile`.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two levels of `.zed` config: one at the worktree root, one under `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the settings/tasks files be scanned before querying them.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies the worktree-root `.zed/tasks.json` source.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolved for a file under `a/` come from the root
            // `.zed/settings.json`; for `b/` the nested one wins.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks.json files contribute a "cargo check" task, with args
    // reflecting which file each came from.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled so it stays associated with
    // its source kind across the source swap below.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Build a replacement task list (extra arg + env var) to push through a
    // channel-backed static source.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                // Replace the file-backed source with the channel-backed one
                // under the same source kind.
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The root task now reflects the pushed template (new arg + env); the
    // nested `b/` task is untouched.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
297
// End-to-end lifecycle test for language-server management: servers start
// lazily when a matching buffer opens, receive open/change/save/close
// notifications only for buffers of their language, follow files across
// renames (including language changes), and are restarted on demand.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers
    // so we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        // No language assigned yet — the registry doesn't know Rust.
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        // No server for TOML, so no completion triggers.
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename appears to the server as a close of the old path...
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    // ...followed by an open of the new path with version reset to 0.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not guaranteed, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
680
// Verifies `workspace/didChangeWatchedFiles` handling: gitignored directories
// are not scanned until a server registers a watcher covering them, and FS
// mutations are forwarded to the server only when they match its glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target` is gitignored; only `target/y/**` will later be watched.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                // `target` is present but its contents were never scanned.
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline, so we can count the extra scans triggered by the watcher below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            // This watcher reaches into the ignored `target` dir.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort so assertions below don't depend on delivery order.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone produces no change events...
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // ...but does trigger scanning of the newly-watched ignored subtree.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                // Only `target/y` is loaded recursively; `x` and `z` stay shallow.
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
874
// Verifies that diagnostics published for two single-file worktrees are
// routed to the correct buffer, with severity preserved per file.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file becomes its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server:
    // an ERROR on `a` in a.rs and a WARNING on `b` in b.rs.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer's chunks carry exactly its own diagnostic over the
    // variable name, with surrounding text undecorated.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
966
967#[gpui::test]
968async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
969 init_test(cx);
970
971 let fs = FakeFs::new(cx.executor());
972 fs.insert_tree(
973 "/root",
974 json!({
975 "dir": {
976 ".git": {
977 "HEAD": "ref: refs/heads/main",
978 },
979 ".gitignore": "b.rs",
980 "a.rs": "let a = 1;",
981 "b.rs": "let b = 2;",
982 },
983 "other.rs": "let b = c;"
984 }),
985 )
986 .await;
987
988 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
989 let (worktree, _) = project
990 .update(cx, |project, cx| {
991 project.find_or_create_worktree("/root/dir", true, cx)
992 })
993 .await
994 .unwrap();
995 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
996
997 let (worktree, _) = project
998 .update(cx, |project, cx| {
999 project.find_or_create_worktree("/root/other.rs", false, cx)
1000 })
1001 .await
1002 .unwrap();
1003 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1004
1005 let server_id = LanguageServerId(0);
1006 project.update(cx, |project, cx| {
1007 project
1008 .update_diagnostics(
1009 server_id,
1010 lsp::PublishDiagnosticsParams {
1011 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1012 version: None,
1013 diagnostics: vec![lsp::Diagnostic {
1014 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1015 severity: Some(lsp::DiagnosticSeverity::ERROR),
1016 message: "unused variable 'b'".to_string(),
1017 ..Default::default()
1018 }],
1019 },
1020 &[],
1021 cx,
1022 )
1023 .unwrap();
1024 project
1025 .update_diagnostics(
1026 server_id,
1027 lsp::PublishDiagnosticsParams {
1028 uri: Url::from_file_path("/root/other.rs").unwrap(),
1029 version: None,
1030 diagnostics: vec![lsp::Diagnostic {
1031 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1032 severity: Some(lsp::DiagnosticSeverity::ERROR),
1033 message: "unknown variable 'c'".to_string(),
1034 ..Default::default()
1035 }],
1036 },
1037 &[],
1038 cx,
1039 )
1040 .unwrap();
1041 });
1042
1043 let main_ignored_buffer = project
1044 .update(cx, |project, cx| {
1045 project.open_buffer((main_worktree_id, "b.rs"), cx)
1046 })
1047 .await
1048 .unwrap();
1049 main_ignored_buffer.update(cx, |buffer, _| {
1050 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1051 assert_eq!(
1052 chunks
1053 .iter()
1054 .map(|(s, d)| (s.as_str(), *d))
1055 .collect::<Vec<_>>(),
1056 &[
1057 ("let ", None),
1058 ("b", Some(DiagnosticSeverity::ERROR)),
1059 (" = 2;", None),
1060 ],
1061 "Gigitnored buffers should still get in-buffer diagnostics",
1062 );
1063 });
1064 let other_buffer = project
1065 .update(cx, |project, cx| {
1066 project.open_buffer((other_worktree_id, ""), cx)
1067 })
1068 .await
1069 .unwrap();
1070 other_buffer.update(cx, |buffer, _| {
1071 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1072 assert_eq!(
1073 chunks
1074 .iter()
1075 .map(|(s, d)| (s.as_str(), *d))
1076 .collect::<Vec<_>>(),
1077 &[
1078 ("let b = ", None),
1079 ("c", Some(DiagnosticSeverity::ERROR)),
1080 (";", None),
1081 ],
1082 "Buffers from hidden projects should still get in-buffer diagnostics"
1083 );
1084 });
1085
1086 project.update(cx, |project, cx| {
1087 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1088 assert_eq!(
1089 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1090 vec![(
1091 ProjectPath {
1092 worktree_id: main_worktree_id,
1093 path: Arc::from(Path::new("b.rs")),
1094 },
1095 server_id,
1096 DiagnosticSummary {
1097 error_count: 1,
1098 warning_count: 0,
1099 }
1100 )]
1101 );
1102 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1103 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1104 });
1105}
1106
// End-to-end check of the disk-based diagnostics lifecycle: $/progress
// notifications using the adapter's configured token must surface as
// DiskBasedDiagnosticsStarted/Finished project events, each publish must emit
// one DiagnosticsUpdated event, and re-publishing identical (empty)
// diagnostics must not emit a duplicate event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake adapter is configured to treat as disk-based diagnostic work.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning progress with the configured token (plus a suffix) maps to a
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // A publish for a file that isn't open yet still produces one
    // DiagnosticsUpdated event for its project path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Opening the buffer afterwards exposes the previously published diagnostic.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: the diagnostics did not actually change.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1235
// Restarting a language server while its disk-based diagnostics are still in
// flight must not leave the project stuck in a "diagnostics running" state:
// only the replacement server's progress lifecycle drives the
// Started/Finished events, even though the old server never ended its
// progress.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1314
// Restarting a language server must clear the diagnostics it previously
// published, both from the buffer itself and from the project-wide summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm it landed in the buffer
    // and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1394
1395#[gpui::test]
1396async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1397 init_test(cx);
1398
1399 let fs = FakeFs::new(cx.executor());
1400 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1401
1402 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1403 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1404
1405 language_registry.add(rust_lang());
1406 let mut fake_servers =
1407 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1408
1409 let buffer = project
1410 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1411 .await
1412 .unwrap();
1413
1414 // Before restarting the server, report diagnostics with an unknown buffer version.
1415 let fake_server = fake_servers.next().await.unwrap();
1416 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1417 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1418 version: Some(10000),
1419 diagnostics: Vec::new(),
1420 });
1421 cx.executor().run_until_parked();
1422
1423 project.update(cx, |project, cx| {
1424 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1425 });
1426 let mut fake_server = fake_servers.next().await.unwrap();
1427 let notification = fake_server
1428 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1429 .await
1430 .text_document;
1431 assert_eq!(notification.version, 0);
1432}
1433
// Cancelling language-server work for a buffer must send
// window/workDoneProgressCancel only for progress begun with
// `cancellable: Some(true)` — the non-cancellable token is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable token...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1496
// Toggling the per-language `enable_language_server` setting must stop or
// start only the server for that language, leaving other languages' servers
// untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is asked to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // ...while the JS server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1610
// Diagnostics published against an *older* buffer version must be translated
// through the edits made since that version: positions shift with insertions,
// overlapping diagnostics highlight correctly, and disk-based diagnostics
// track further (unsaved) edits. Runs multiple iterations to shake out
// scheduling-order dependence.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shifted by the two inserted newlines: LSP rows 1 and 2 now
        // appear at buffer rows 3 and 4.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // The chunk iterator reports the same diagnostics as highlight spans.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // Chunks are clipped to the requested sub-range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error wins the highlight;
        // the warning covers the remainder of its own range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Positions reflect the latest edits even though the publish listed
        // the diagnostics out of order.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1890
// Zero-width diagnostic ranges must still be visible: they are widened to
// cover an adjacent character when rendered as chunks.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two empty-range diagnostics directly, bypassing the LSP layer:
    // one mid-line (before the `;`) and one at end-of-line.
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1959
1960#[gpui::test]
1961async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1962 init_test(cx);
1963
1964 let fs = FakeFs::new(cx.executor());
1965 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1966 .await;
1967
1968 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1969
1970 project.update(cx, |project, cx| {
1971 project
1972 .update_diagnostic_entries(
1973 LanguageServerId(0),
1974 Path::new("/dir/a.rs").to_owned(),
1975 None,
1976 vec![DiagnosticEntry {
1977 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1978 diagnostic: Diagnostic {
1979 severity: DiagnosticSeverity::ERROR,
1980 is_primary: true,
1981 message: "syntax error a1".to_string(),
1982 ..Default::default()
1983 },
1984 }],
1985 cx,
1986 )
1987 .unwrap();
1988 project
1989 .update_diagnostic_entries(
1990 LanguageServerId(1),
1991 Path::new("/dir/a.rs").to_owned(),
1992 None,
1993 vec![DiagnosticEntry {
1994 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1995 diagnostic: Diagnostic {
1996 severity: DiagnosticSeverity::ERROR,
1997 is_primary: true,
1998 message: "syntax error b1".to_string(),
1999 ..Default::default()
2000 },
2001 }],
2002 cx,
2003 )
2004 .unwrap();
2005
2006 assert_eq!(
2007 project.diagnostic_summary(false, cx),
2008 DiagnosticSummary {
2009 error_count: 2,
2010 warning_count: 0,
2011 }
2012 );
2013 });
2014}
2015
// LSP edits computed against a *past* document version must be remapped
// through the buffer edits made since then, so applying them still yields
// the intended result.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the version the server saw when the document was opened; the
    // LSP edits below will be expressed against this (soon stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Convert LSP edits (positions in the old version) into buffer edits.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves both the server's changes and
    // the user's intervening edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2168
// A large "rewrite the whole file" style LSP diff (as rust-analyzer emits for
// merge-imports) must be normalized down to the minimal set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four-edit diff collapses to two minimal edits: merge the import
        // and delete the now-duplicated second use statement.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2276
/// Verifies that `Project::edits_from_lsp` tolerates malformed input from a
/// language server: unordered edits, an inverted range, and a range that
/// extends past the end of the buffer — all normalized to the same minimal
/// edits as the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // An inverted range: the start position comes after the end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // A range whose end points far past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Convert the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed edits are normalized into the same two minimal edits
        // as the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2380
2381fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2382 buffer: &Buffer,
2383 range: Range<T>,
2384) -> Vec<(String, Option<DiagnosticSeverity>)> {
2385 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2386 for chunk in buffer.snapshot().chunks(range, true) {
2387 if chunks.last().map_or(false, |prev_chunk| {
2388 prev_chunk.1 == chunk.diagnostic_severity
2389 }) {
2390 chunks.last_mut().unwrap().0.push_str(chunk.text);
2391 } else {
2392 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2393 }
2394 }
2395 chunks
2396}
2397
/// Exercises go-to-definition across files: resolving a definition located in
/// a file outside the project should add that file as an invisible worktree,
/// which is dropped once the definition is released.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Respond to the definition request with a location in the other file.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The definition's file was added as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2492
/// When completion items come back without an explicit edit range, the
/// completion should be applied over the word (or word-like) prefix that
/// precedes the position where the completion was requested.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word prefix ("fqn") at the end of the line.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The completion replaces the 3-character "fqn" prefix before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, just before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The completion replaces the "cmp" segment before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2584
/// Completion insert text containing carriage returns ("\r" or "\r\n") must be
/// normalized to bare "\n" line endings before being offered as `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with insert text that mixes "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" have been normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2645
/// Applies a code action that carries no edits, only a command: resolving the
/// action populates the command, executing the command makes the fake server
/// send a `workspace/applyEdit` request, and those edits must end up in the
/// project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise codeAction/resolve support so the client resolves before applying.
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // Undoing the transaction's edit restores the original contents.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2778
2779#[gpui::test(iterations = 10)]
2780async fn test_save_file(cx: &mut gpui::TestAppContext) {
2781 init_test(cx);
2782
2783 let fs = FakeFs::new(cx.executor());
2784 fs.insert_tree(
2785 "/dir",
2786 json!({
2787 "file1": "the old contents",
2788 }),
2789 )
2790 .await;
2791
2792 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2793 let buffer = project
2794 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2795 .await
2796 .unwrap();
2797 buffer.update(cx, |buffer, cx| {
2798 assert_eq!(buffer.text(), "the old contents");
2799 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2800 });
2801
2802 project
2803 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2804 .await
2805 .unwrap();
2806
2807 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2808 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2809}
2810
2811#[gpui::test(iterations = 30)]
2812async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2813 init_test(cx);
2814
2815 let fs = FakeFs::new(cx.executor().clone());
2816 fs.insert_tree(
2817 "/dir",
2818 json!({
2819 "file1": "the original contents",
2820 }),
2821 )
2822 .await;
2823
2824 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2825 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2826 let buffer = project
2827 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2828 .await
2829 .unwrap();
2830
2831 // Simulate buffer diffs being slow, so that they don't complete before
2832 // the next file change occurs.
2833 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2834
2835 // Change the buffer's file on disk, and then wait for the file change
2836 // to be detected by the worktree, so that the buffer starts reloading.
2837 fs.save(
2838 "/dir/file1".as_ref(),
2839 &"the first contents".into(),
2840 Default::default(),
2841 )
2842 .await
2843 .unwrap();
2844 worktree.next_event(cx).await;
2845
2846 // Change the buffer's file again. Depending on the random seed, the
2847 // previous file change may still be in progress.
2848 fs.save(
2849 "/dir/file1".as_ref(),
2850 &"the second contents".into(),
2851 Default::default(),
2852 )
2853 .await
2854 .unwrap();
2855 worktree.next_event(cx).await;
2856
2857 cx.executor().run_until_parked();
2858 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2859 buffer.read_with(cx, |buffer, _| {
2860 assert_eq!(buffer.text(), on_disk_text);
2861 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2862 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2863 });
2864}
2865
2866#[gpui::test(iterations = 30)]
2867async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2868 init_test(cx);
2869
2870 let fs = FakeFs::new(cx.executor().clone());
2871 fs.insert_tree(
2872 "/dir",
2873 json!({
2874 "file1": "the original contents",
2875 }),
2876 )
2877 .await;
2878
2879 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2880 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2881 let buffer = project
2882 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2883 .await
2884 .unwrap();
2885
2886 // Simulate buffer diffs being slow, so that they don't complete before
2887 // the next file change occurs.
2888 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2889
2890 // Change the buffer's file on disk, and then wait for the file change
2891 // to be detected by the worktree, so that the buffer starts reloading.
2892 fs.save(
2893 "/dir/file1".as_ref(),
2894 &"the first contents".into(),
2895 Default::default(),
2896 )
2897 .await
2898 .unwrap();
2899 worktree.next_event(cx).await;
2900
2901 cx.executor()
2902 .spawn(cx.executor().simulate_random_delay())
2903 .await;
2904
2905 // Perform a noop edit, causing the buffer's version to increase.
2906 buffer.update(cx, |buffer, cx| {
2907 buffer.edit([(0..0, " ")], None, cx);
2908 buffer.undo(cx);
2909 });
2910
2911 cx.executor().run_until_parked();
2912 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2913 buffer.read_with(cx, |buffer, _| {
2914 let buffer_text = buffer.text();
2915 if buffer_text == on_disk_text {
2916 assert!(
2917 !buffer.is_dirty() && !buffer.has_conflict(),
2918 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2919 );
2920 }
2921 // If the file change occurred while the buffer was processing the first
2922 // change, the buffer will be in a conflicting state.
2923 else {
2924 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2925 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2926 }
2927 });
2928}
2929
2930#[gpui::test]
2931async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2932 init_test(cx);
2933
2934 let fs = FakeFs::new(cx.executor());
2935 fs.insert_tree(
2936 "/dir",
2937 json!({
2938 "file1": "the old contents",
2939 }),
2940 )
2941 .await;
2942
2943 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2944 let buffer = project
2945 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2946 .await
2947 .unwrap();
2948 buffer.update(cx, |buffer, cx| {
2949 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2950 });
2951
2952 project
2953 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2954 .await
2955 .unwrap();
2956
2957 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2958 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2959}
2960
2961#[gpui::test]
2962async fn test_save_as(cx: &mut gpui::TestAppContext) {
2963 init_test(cx);
2964
2965 let fs = FakeFs::new(cx.executor());
2966 fs.insert_tree("/dir", json!({})).await;
2967
2968 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2969
2970 let languages = project.update(cx, |project, _| project.languages().clone());
2971 languages.add(rust_lang());
2972
2973 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
2974 buffer.update(cx, |buffer, cx| {
2975 buffer.edit([(0..0, "abc")], None, cx);
2976 assert!(buffer.is_dirty());
2977 assert!(!buffer.has_conflict());
2978 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2979 });
2980 project
2981 .update(cx, |project, cx| {
2982 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
2983 let path = ProjectPath {
2984 worktree_id,
2985 path: Arc::from(Path::new("file1.rs")),
2986 };
2987 project.save_buffer_as(buffer.clone(), path, cx)
2988 })
2989 .await
2990 .unwrap();
2991 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2992
2993 cx.executor().run_until_parked();
2994 buffer.update(cx, |buffer, cx| {
2995 assert_eq!(
2996 buffer.file().unwrap().full_path(cx),
2997 Path::new("dir/file1.rs")
2998 );
2999 assert!(!buffer.is_dirty());
3000 assert!(!buffer.has_conflict());
3001 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
3002 });
3003
3004 let opened_buffer = project
3005 .update(cx, |project, cx| {
3006 project.open_local_buffer("/dir/file1.rs", cx)
3007 })
3008 .await
3009 .unwrap();
3010 assert_eq!(opened_buffer, buffer);
3011}
3012
/// Renames and deletes files on the real filesystem, then checks that (a) the
/// local worktree rescans correctly — entry ids survive renames and open
/// buffers track their files — and (b) a remote replica of the worktree
/// converges to the same state after replaying the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // against the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files across renames; the buffer whose file
    // was removed keeps its old path but is marked deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3160
3161#[gpui::test(iterations = 10)]
3162async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3163 init_test(cx);
3164
3165 let fs = FakeFs::new(cx.executor());
3166 fs.insert_tree(
3167 "/dir",
3168 json!({
3169 "a": {
3170 "file1": "",
3171 }
3172 }),
3173 )
3174 .await;
3175
3176 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3177 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3178 let tree_id = tree.update(cx, |tree, _| tree.id());
3179
3180 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3181 project.update(cx, |project, cx| {
3182 let tree = project.worktrees(cx).next().unwrap();
3183 tree.read(cx)
3184 .entry_for_path(path)
3185 .unwrap_or_else(|| panic!("no entry for path {}", path))
3186 .id
3187 })
3188 };
3189
3190 let dir_id = id_for_path("a", cx);
3191 let file_id = id_for_path("a/file1", cx);
3192 let buffer = project
3193 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3194 .await
3195 .unwrap();
3196 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3197
3198 project
3199 .update(cx, |project, cx| {
3200 project.rename_entry(dir_id, Path::new("b"), cx)
3201 })
3202 .unwrap()
3203 .await
3204 .to_included()
3205 .unwrap();
3206 cx.executor().run_until_parked();
3207
3208 assert_eq!(id_for_path("b", cx), dir_id);
3209 assert_eq!(id_for_path("b/file1", cx), file_id);
3210 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3211}
3212
3213#[gpui::test]
3214async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3215 init_test(cx);
3216
3217 let fs = FakeFs::new(cx.executor());
3218 fs.insert_tree(
3219 "/dir",
3220 json!({
3221 "a.txt": "a-contents",
3222 "b.txt": "b-contents",
3223 }),
3224 )
3225 .await;
3226
3227 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3228
3229 // Spawn multiple tasks to open paths, repeating some paths.
3230 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3231 (
3232 p.open_local_buffer("/dir/a.txt", cx),
3233 p.open_local_buffer("/dir/b.txt", cx),
3234 p.open_local_buffer("/dir/a.txt", cx),
3235 )
3236 });
3237
3238 let buffer_a_1 = buffer_a_1.await.unwrap();
3239 let buffer_a_2 = buffer_a_2.await.unwrap();
3240 let buffer_b = buffer_b.await.unwrap();
3241 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3242 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3243
3244 // There is only one buffer per path.
3245 let buffer_a_id = buffer_a_1.entity_id();
3246 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3247
3248 // Open the same path again while it is still open.
3249 drop(buffer_a_1);
3250 let buffer_a_3 = project
3251 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3252 .await
3253 .unwrap();
3254
3255 // There's still only one buffer per path.
3256 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3257}
3258
/// Walks buffers through edit/save/delete cycles, asserting the dirty flag and
/// the exact sequence of buffer events emitted at each step — including that
/// deleting an already-dirty file does not emit a second DirtyChanged event.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            // Record all events except operations, which aren't relevant here.
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3399
3400#[gpui::test]
3401async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3402 init_test(cx);
3403
3404 let initial_contents = "aaa\nbbbbb\nc\n";
3405 let fs = FakeFs::new(cx.executor());
3406 fs.insert_tree(
3407 "/dir",
3408 json!({
3409 "the-file": initial_contents,
3410 }),
3411 )
3412 .await;
3413 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3414 let buffer = project
3415 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3416 .await
3417 .unwrap();
3418
3419 let anchors = (0..3)
3420 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3421 .collect::<Vec<_>>();
3422
3423 // Change the file on disk, adding two new lines of text, and removing
3424 // one line.
3425 buffer.update(cx, |buffer, _| {
3426 assert!(!buffer.is_dirty());
3427 assert!(!buffer.has_conflict());
3428 });
3429 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3430 fs.save(
3431 "/dir/the-file".as_ref(),
3432 &new_contents.into(),
3433 LineEnding::Unix,
3434 )
3435 .await
3436 .unwrap();
3437
3438 // Because the buffer was not modified, it is reloaded from disk. Its
3439 // contents are edited according to the diff between the old and new
3440 // file contents.
3441 cx.executor().run_until_parked();
3442 buffer.update(cx, |buffer, _| {
3443 assert_eq!(buffer.text(), new_contents);
3444 assert!(!buffer.is_dirty());
3445 assert!(!buffer.has_conflict());
3446
3447 let anchor_positions = anchors
3448 .iter()
3449 .map(|anchor| anchor.to_point(&*buffer))
3450 .collect::<Vec<_>>();
3451 assert_eq!(
3452 anchor_positions,
3453 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3454 );
3455 });
3456
3457 // Modify the buffer
3458 buffer.update(cx, |buffer, cx| {
3459 buffer.edit([(0..0, " ")], None, cx);
3460 assert!(buffer.is_dirty());
3461 assert!(!buffer.has_conflict());
3462 });
3463
3464 // Change the file on disk again, adding blank lines to the beginning.
3465 fs.save(
3466 "/dir/the-file".as_ref(),
3467 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3468 LineEnding::Unix,
3469 )
3470 .await
3471 .unwrap();
3472
3473 // Because the buffer is modified, it doesn't reload from disk, but is
3474 // marked as having a conflict.
3475 cx.executor().run_until_parked();
3476 buffer.update(cx, |buffer, _| {
3477 assert!(buffer.has_conflict());
3478 });
3479}
3480
3481#[gpui::test]
3482async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3483 init_test(cx);
3484
3485 let fs = FakeFs::new(cx.executor());
3486 fs.insert_tree(
3487 "/dir",
3488 json!({
3489 "file1": "a\nb\nc\n",
3490 "file2": "one\r\ntwo\r\nthree\r\n",
3491 }),
3492 )
3493 .await;
3494
3495 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3496 let buffer1 = project
3497 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3498 .await
3499 .unwrap();
3500 let buffer2 = project
3501 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3502 .await
3503 .unwrap();
3504
3505 buffer1.update(cx, |buffer, _| {
3506 assert_eq!(buffer.text(), "a\nb\nc\n");
3507 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3508 });
3509 buffer2.update(cx, |buffer, _| {
3510 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3511 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3512 });
3513
3514 // Change a file's line endings on disk from unix to windows. The buffer's
3515 // state updates correctly.
3516 fs.save(
3517 "/dir/file1".as_ref(),
3518 &"aaa\nb\nc\n".into(),
3519 LineEnding::Windows,
3520 )
3521 .await
3522 .unwrap();
3523 cx.executor().run_until_parked();
3524 buffer1.update(cx, |buffer, _| {
3525 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3526 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3527 });
3528
3529 // Save a file with windows line endings. The file is written correctly.
3530 buffer2.update(cx, |buffer, cx| {
3531 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3532 });
3533 project
3534 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3535 .await
3536 .unwrap();
3537 assert_eq!(
3538 fs.load("/dir/file2".as_ref()).await.unwrap(),
3539 "one\r\ntwo\r\nthree\r\nfour\r\n",
3540 );
3541}
3542
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics whose `related_information` entries point
    // at one another are collapsed into groups: each group has exactly one
    // primary entry (the original diagnostic) plus its supporting hints, and
    // a whole group can be retrieved via `diagnostic_group(group_id)`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Five raw LSP diagnostics for the same file: a warning ("error 1") with
    // one hint, and an error ("error 2") with two hints. The hints reference
    // their primaries (and vice versa) through `related_information`, which
    // is what the grouping logic keys off.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group "error 1" (a warning).
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; points back at the original.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2" (an error) with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of "error 2"; points back at the original.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of "error 2"; also points back at the original.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics as if published by language server 0.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, in buffer order. "error 2" and its hints share group 0;
    // "error 1" and its hint share group 1; exactly one entry per group has
    // `is_primary: true`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 alone: the "error 2" primary plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 alone: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3784
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the two-phase LSP rename flow against a fake language server:
    // `prepare_rename` resolves the renameable range, then `perform_rename`
    // applies a multi-file WorkspaceEdit and returns the edited buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // (including prepare-rename).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake server.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE"). The request is
    // issued first; the handler is installed afterwards and `.next().await`
    // resolves once it has served exactly one request.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    // The server's range is converted back into buffer offsets (6..9 spans
    // the identifier "ONE").
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform the rename to "THREE". The fake server answers with a
    // WorkspaceEdit touching both files: the definition in one.rs and the two
    // references in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo information; both
    // buffers must reflect the applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3918
3919#[gpui::test]
3920async fn test_search(cx: &mut gpui::TestAppContext) {
3921 init_test(cx);
3922
3923 let fs = FakeFs::new(cx.executor());
3924 fs.insert_tree(
3925 "/dir",
3926 json!({
3927 "one.rs": "const ONE: usize = 1;",
3928 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3929 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3930 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3931 }),
3932 )
3933 .await;
3934 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3935 assert_eq!(
3936 search(
3937 &project,
3938 SearchQuery::text(
3939 "TWO",
3940 false,
3941 true,
3942 false,
3943 Default::default(),
3944 Default::default(),
3945 None
3946 )
3947 .unwrap(),
3948 cx
3949 )
3950 .await
3951 .unwrap(),
3952 HashMap::from_iter([
3953 ("dir/two.rs".to_string(), vec![6..9]),
3954 ("dir/three.rs".to_string(), vec![37..40])
3955 ])
3956 );
3957
3958 let buffer_4 = project
3959 .update(cx, |project, cx| {
3960 project.open_local_buffer("/dir/four.rs", cx)
3961 })
3962 .await
3963 .unwrap();
3964 buffer_4.update(cx, |buffer, cx| {
3965 let text = "two::TWO";
3966 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3967 });
3968
3969 assert_eq!(
3970 search(
3971 &project,
3972 SearchQuery::text(
3973 "TWO",
3974 false,
3975 true,
3976 false,
3977 Default::default(),
3978 Default::default(),
3979 None,
3980 )
3981 .unwrap(),
3982 cx
3983 )
3984 .await
3985 .unwrap(),
3986 HashMap::from_iter([
3987 ("dir/two.rs".to_string(), vec![6..9]),
3988 ("dir/three.rs".to_string(), vec![37..40]),
3989 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3990 ])
3991 );
3992}
3993
3994#[gpui::test]
3995async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3996 init_test(cx);
3997
3998 let search_query = "file";
3999
4000 let fs = FakeFs::new(cx.executor());
4001 fs.insert_tree(
4002 "/dir",
4003 json!({
4004 "one.rs": r#"// Rust file one"#,
4005 "one.ts": r#"// TypeScript file one"#,
4006 "two.rs": r#"// Rust file two"#,
4007 "two.ts": r#"// TypeScript file two"#,
4008 }),
4009 )
4010 .await;
4011 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4012
4013 assert!(
4014 search(
4015 &project,
4016 SearchQuery::text(
4017 search_query,
4018 false,
4019 true,
4020 false,
4021 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4022 Default::default(),
4023 None
4024 )
4025 .unwrap(),
4026 cx
4027 )
4028 .await
4029 .unwrap()
4030 .is_empty(),
4031 "If no inclusions match, no files should be returned"
4032 );
4033
4034 assert_eq!(
4035 search(
4036 &project,
4037 SearchQuery::text(
4038 search_query,
4039 false,
4040 true,
4041 false,
4042 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4043 Default::default(),
4044 None
4045 )
4046 .unwrap(),
4047 cx
4048 )
4049 .await
4050 .unwrap(),
4051 HashMap::from_iter([
4052 ("dir/one.rs".to_string(), vec![8..12]),
4053 ("dir/two.rs".to_string(), vec![8..12]),
4054 ]),
4055 "Rust only search should give only Rust files"
4056 );
4057
4058 assert_eq!(
4059 search(
4060 &project,
4061 SearchQuery::text(
4062 search_query,
4063 false,
4064 true,
4065 false,
4066
4067 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4068
4069 Default::default(),
4070 None,
4071 ).unwrap(),
4072 cx
4073 )
4074 .await
4075 .unwrap(),
4076 HashMap::from_iter([
4077 ("dir/one.ts".to_string(), vec![14..18]),
4078 ("dir/two.ts".to_string(), vec![14..18]),
4079 ]),
4080 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4081 );
4082
4083 assert_eq!(
4084 search(
4085 &project,
4086 SearchQuery::text(
4087 search_query,
4088 false,
4089 true,
4090 false,
4091
4092 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4093
4094 Default::default(),
4095 None,
4096 ).unwrap(),
4097 cx
4098 )
4099 .await
4100 .unwrap(),
4101 HashMap::from_iter([
4102 ("dir/two.ts".to_string(), vec![14..18]),
4103 ("dir/one.rs".to_string(), vec![8..12]),
4104 ("dir/one.ts".to_string(), vec![14..18]),
4105 ("dir/two.rs".to_string(), vec![8..12]),
4106 ]),
4107 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4108 );
4109}
4110
4111#[gpui::test]
4112async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4113 init_test(cx);
4114
4115 let search_query = "file";
4116
4117 let fs = FakeFs::new(cx.executor());
4118 fs.insert_tree(
4119 "/dir",
4120 json!({
4121 "one.rs": r#"// Rust file one"#,
4122 "one.ts": r#"// TypeScript file one"#,
4123 "two.rs": r#"// Rust file two"#,
4124 "two.ts": r#"// TypeScript file two"#,
4125 }),
4126 )
4127 .await;
4128 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4129
4130 assert_eq!(
4131 search(
4132 &project,
4133 SearchQuery::text(
4134 search_query,
4135 false,
4136 true,
4137 false,
4138 Default::default(),
4139 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4140 None,
4141 )
4142 .unwrap(),
4143 cx
4144 )
4145 .await
4146 .unwrap(),
4147 HashMap::from_iter([
4148 ("dir/one.rs".to_string(), vec![8..12]),
4149 ("dir/one.ts".to_string(), vec![14..18]),
4150 ("dir/two.rs".to_string(), vec![8..12]),
4151 ("dir/two.ts".to_string(), vec![14..18]),
4152 ]),
4153 "If no exclusions match, all files should be returned"
4154 );
4155
4156 assert_eq!(
4157 search(
4158 &project,
4159 SearchQuery::text(
4160 search_query,
4161 false,
4162 true,
4163 false,
4164 Default::default(),
4165 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4166 None,
4167 )
4168 .unwrap(),
4169 cx
4170 )
4171 .await
4172 .unwrap(),
4173 HashMap::from_iter([
4174 ("dir/one.ts".to_string(), vec![14..18]),
4175 ("dir/two.ts".to_string(), vec![14..18]),
4176 ]),
4177 "Rust exclusion search should give only TypeScript files"
4178 );
4179
4180 assert_eq!(
4181 search(
4182 &project,
4183 SearchQuery::text(
4184 search_query,
4185 false,
4186 true,
4187 false,
4188 Default::default(),
4189
4190 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4191 None,
4192
4193 ).unwrap(),
4194 cx
4195 )
4196 .await
4197 .unwrap(),
4198 HashMap::from_iter([
4199 ("dir/one.rs".to_string(), vec![8..12]),
4200 ("dir/two.rs".to_string(), vec![8..12]),
4201 ]),
4202 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4203 );
4204
4205 assert!(
4206 search(
4207 &project,
4208 SearchQuery::text(
4209 search_query,
4210 false,
4211 true,
4212 false,
4213 Default::default(),
4214
4215 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4216 None,
4217
4218 ).unwrap(),
4219 cx
4220 )
4221 .await
4222 .unwrap().is_empty(),
4223 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4224 );
4225}
4226
4227#[gpui::test]
4228async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4229 init_test(cx);
4230
4231 let search_query = "file";
4232
4233 let fs = FakeFs::new(cx.executor());
4234 fs.insert_tree(
4235 "/dir",
4236 json!({
4237 "one.rs": r#"// Rust file one"#,
4238 "one.ts": r#"// TypeScript file one"#,
4239 "two.rs": r#"// Rust file two"#,
4240 "two.ts": r#"// TypeScript file two"#,
4241 }),
4242 )
4243 .await;
4244 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4245
4246 assert!(
4247 search(
4248 &project,
4249 SearchQuery::text(
4250 search_query,
4251 false,
4252 true,
4253 false,
4254 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4255 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4256 None,
4257 )
4258 .unwrap(),
4259 cx
4260 )
4261 .await
4262 .unwrap()
4263 .is_empty(),
4264 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4265 );
4266
4267 assert!(
4268 search(
4269 &project,
4270 SearchQuery::text(
4271 search_query,
4272 false,
4273 true,
4274 false,
4275 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4276 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4277 None,
4278 ).unwrap(),
4279 cx
4280 )
4281 .await
4282 .unwrap()
4283 .is_empty(),
4284 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4285 );
4286
4287 assert!(
4288 search(
4289 &project,
4290 SearchQuery::text(
4291 search_query,
4292 false,
4293 true,
4294 false,
4295 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4296 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4297 None,
4298 )
4299 .unwrap(),
4300 cx
4301 )
4302 .await
4303 .unwrap()
4304 .is_empty(),
4305 "Non-matching inclusions and exclusions should not change that."
4306 );
4307
4308 assert_eq!(
4309 search(
4310 &project,
4311 SearchQuery::text(
4312 search_query,
4313 false,
4314 true,
4315 false,
4316 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4317 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4318 None,
4319 )
4320 .unwrap(),
4321 cx
4322 )
4323 .await
4324 .unwrap(),
4325 HashMap::from_iter([
4326 ("dir/one.ts".to_string(), vec![14..18]),
4327 ("dir/two.ts".to_string(), vec![14..18]),
4328 ]),
4329 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4330 );
4331}
4332
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that inclusion globs are applied relative to worktree roots in
    // a multi-worktree project: a glob prefixed with a worktree name limits
    // results to that worktree, while an unprefixed glob spans all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // One project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // Worktree-prefixed glob restricts the search to worktree-a only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same for worktree-b.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unprefixed glob matches files in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4427
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the `include_ignored` flag of text search: by default
    // gitignored directories are skipped; with the flag set they are searched
    // too, and include/exclude matchers still apply to ignored files.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Default search (include_ignored = false): only the non-ignored
    // package.json is visited.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is constructed for each case — presumably so state from
    // the previous search/scan doesn't carry over; NOTE(review): confirm this
    // is required.
    // With include_ignored = true, the target/ and node_modules/ contents are
    // searched as well.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include/exclude matchers also apply inside ignored directories: include
    // only the prettier subtree, then exclude its .ts file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4544
#[gpui::test]
async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
    // Pins the order in which search results are streamed back from
    // `Project::search`. The asserted order is bbb/index.txt, the two
    // node_modules entries (lexicographic, so "1 two" before "10 eleven"),
    // then aaa.txt last — NOTE(review): the rationale for this ordering isn't
    // visible here; confirm against the search implementation before relying
    // on it elsewhere.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "aaa.txt": "key:value",
            "bbb": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "10 eleven": "key",
                "1 two": "key"
            },
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // include_ignored = true, so node_modules results are produced too.
    let mut search = project.update(cx, |project, cx| {
        project.search(
            SearchQuery::text(
                "key",
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx,
        )
    });

    // Helper: extract the worktree-relative path of the buffer carried by the
    // next streamed search result.
    fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
        match search_result.unwrap() {
            SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
                buffer.file().unwrap().path().to_string_lossy().to_string()
            }),
            _ => panic!("Expected buffer"),
        }
    }

    assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
    assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
    assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
    assert_eq!(file_name(search.next().await, cx), "aaa.txt");
    // The stream terminates after the final match.
    assert!(search.next().await.is_none())
}
4599
#[test]
fn test_glob_literal_prefix() {
    // The literal prefix of a glob is everything up to the first pattern
    // metacharacter, kept to whole path components; a pattern with no
    // metacharacters is its own prefix.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (pattern, expected) in cases {
        assert_eq!(glob_literal_prefix(pattern), expected);
    }
}
4607
4608#[gpui::test]
4609async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4610 init_test(cx);
4611
4612 let fs = FakeFs::new(cx.executor().clone());
4613 fs.insert_tree(
4614 "/one/two",
4615 json!({
4616 "three": {
4617 "a.txt": "",
4618 "four": {}
4619 },
4620 "c.rs": ""
4621 }),
4622 )
4623 .await;
4624
4625 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4626 project
4627 .update(cx, |project, cx| {
4628 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4629 project.create_entry((id, "b.."), true, cx)
4630 })
4631 .unwrap()
4632 .await
4633 .to_included()
4634 .unwrap();
4635
4636 // Can't create paths outside the project
4637 let result = project
4638 .update(cx, |project, cx| {
4639 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4640 project.create_entry((id, "../../boop"), true, cx)
4641 })
4642 .await;
4643 assert!(result.is_err());
4644
4645 // Can't create paths with '..'
4646 let result = project
4647 .update(cx, |project, cx| {
4648 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4649 project.create_entry((id, "four/../beep"), true, cx)
4650 })
4651 .await;
4652 assert!(result.is_err());
4653
4654 assert_eq!(
4655 fs.paths(true),
4656 vec![
4657 PathBuf::from("/"),
4658 PathBuf::from("/one"),
4659 PathBuf::from("/one/two"),
4660 PathBuf::from("/one/two/c.rs"),
4661 PathBuf::from("/one/two/three"),
4662 PathBuf::from("/one/two/three/a.txt"),
4663 PathBuf::from("/one/two/three/b.."),
4664 PathBuf::from("/one/two/three/four"),
4665 ]
4666 );
4667
4668 // And we cannot open buffers with '..'
4669 let result = project
4670 .update(cx, |project, cx| {
4671 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4672 project.open_buffer((id, "../c.rs"), cx)
4673 })
4674 .await;
4675 assert!(result.is_err())
4676}
4677
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same ("tsx") language: the first three
    // advertise hover support, the last one does not.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut fake_tsx_language_servers = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _c = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts all four registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install hover handlers per server: two return real hover content, one
    // returns None, and the capability-less one must never be called at all.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Handler kept alive but expected never to fire, since this
                // server declared no hover capability.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover request, then wait until every hover-capable server
    // has actually been queried before inspecting the merged result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute to the result;
    // the `None` response and the capability-less server are absent.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4827
4828#[gpui::test]
4829async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4830 init_test(cx);
4831
4832 let fs = FakeFs::new(cx.executor());
4833 fs.insert_tree(
4834 "/dir",
4835 json!({
4836 "a.ts": "a",
4837 }),
4838 )
4839 .await;
4840
4841 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4842
4843 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4844 language_registry.add(typescript_lang());
4845 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4846 "TypeScript",
4847 FakeLspAdapter {
4848 capabilities: lsp::ServerCapabilities {
4849 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4850 ..lsp::ServerCapabilities::default()
4851 },
4852 ..FakeLspAdapter::default()
4853 },
4854 );
4855
4856 let buffer = project
4857 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4858 .await
4859 .unwrap();
4860 cx.executor().run_until_parked();
4861
4862 let fake_server = fake_language_servers
4863 .next()
4864 .await
4865 .expect("failed to get the language server");
4866
4867 let mut request_handled =
4868 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4869 Ok(Some(lsp::Hover {
4870 contents: lsp::HoverContents::Array(vec![
4871 lsp::MarkedString::String("".to_string()),
4872 lsp::MarkedString::String(" ".to_string()),
4873 lsp::MarkedString::String("\n\n\n".to_string()),
4874 ]),
4875 range: None,
4876 }))
4877 });
4878
4879 let hover_task = project.update(cx, |project, cx| {
4880 project.hover(&buffer, Point::new(0, 0), cx)
4881 });
4882 let () = request_handled
4883 .next()
4884 .await
4885 .expect("All hover requests should have been triggered");
4886 assert_eq!(
4887 Vec::<String>::new(),
4888 hover_task
4889 .await
4890 .into_iter()
4891 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4892 .sorted()
4893 .collect::<Vec<_>>(),
4894 "Empty hover parts should be ignored"
4895 );
4896}
4897
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same ("tsx") language: the first three
    // advertise code-action support, the last one does not.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];
    let mut fake_tsx_language_servers = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _c = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts all four registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install code-action handlers per server: two return a named action, one
    // returns None, and the capability-less one must never be called at all.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_actions_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                // Handler kept alive but expected never to fire, since this
                // server declared no code-action capability.
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Request code actions over the whole buffer, then wait until every
    // action-capable server has actually been queried.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned actions contribute to the result.
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
5048
5049#[gpui::test]
5050async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5051 init_test(cx);
5052
5053 let fs = FakeFs::new(cx.executor());
5054 fs.insert_tree(
5055 "/dir",
5056 json!({
5057 "a.rs": "let a = 1;",
5058 "b.rs": "let b = 2;",
5059 "c.rs": "let c = 2;",
5060 }),
5061 )
5062 .await;
5063
5064 let project = Project::test(
5065 fs,
5066 [
5067 "/dir/a.rs".as_ref(),
5068 "/dir/b.rs".as_ref(),
5069 "/dir/c.rs".as_ref(),
5070 ],
5071 cx,
5072 )
5073 .await;
5074
5075 // check the initial state and get the worktrees
5076 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5077 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5078 assert_eq!(worktrees.len(), 3);
5079
5080 let worktree_a = worktrees[0].read(cx);
5081 let worktree_b = worktrees[1].read(cx);
5082 let worktree_c = worktrees[2].read(cx);
5083
5084 // check they start in the right order
5085 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5086 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5087 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5088
5089 (
5090 worktrees[0].clone(),
5091 worktrees[1].clone(),
5092 worktrees[2].clone(),
5093 )
5094 });
5095
5096 // move first worktree to after the second
5097 // [a, b, c] -> [b, a, c]
5098 project
5099 .update(cx, |project, cx| {
5100 let first = worktree_a.read(cx);
5101 let second = worktree_b.read(cx);
5102 project.move_worktree(first.id(), second.id(), cx)
5103 })
5104 .expect("moving first after second");
5105
5106 // check the state after moving
5107 project.update(cx, |project, cx| {
5108 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5109 assert_eq!(worktrees.len(), 3);
5110
5111 let first = worktrees[0].read(cx);
5112 let second = worktrees[1].read(cx);
5113 let third = worktrees[2].read(cx);
5114
5115 // check they are now in the right order
5116 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5117 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5118 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5119 });
5120
5121 // move the second worktree to before the first
5122 // [b, a, c] -> [a, b, c]
5123 project
5124 .update(cx, |project, cx| {
5125 let second = worktree_a.read(cx);
5126 let first = worktree_b.read(cx);
5127 project.move_worktree(first.id(), second.id(), cx)
5128 })
5129 .expect("moving second before first");
5130
5131 // check the state after moving
5132 project.update(cx, |project, cx| {
5133 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5134 assert_eq!(worktrees.len(), 3);
5135
5136 let first = worktrees[0].read(cx);
5137 let second = worktrees[1].read(cx);
5138 let third = worktrees[2].read(cx);
5139
5140 // check they are now in the right order
5141 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5142 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5143 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5144 });
5145
5146 // move the second worktree to after the third
5147 // [a, b, c] -> [a, c, b]
5148 project
5149 .update(cx, |project, cx| {
5150 let second = worktree_b.read(cx);
5151 let third = worktree_c.read(cx);
5152 project.move_worktree(second.id(), third.id(), cx)
5153 })
5154 .expect("moving second after third");
5155
5156 // check the state after moving
5157 project.update(cx, |project, cx| {
5158 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5159 assert_eq!(worktrees.len(), 3);
5160
5161 let first = worktrees[0].read(cx);
5162 let second = worktrees[1].read(cx);
5163 let third = worktrees[2].read(cx);
5164
5165 // check they are now in the right order
5166 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5167 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5168 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5169 });
5170
5171 // move the third worktree to before the second
5172 // [a, c, b] -> [a, b, c]
5173 project
5174 .update(cx, |project, cx| {
5175 let third = worktree_c.read(cx);
5176 let second = worktree_b.read(cx);
5177 project.move_worktree(third.id(), second.id(), cx)
5178 })
5179 .expect("moving third before second");
5180
5181 // check the state after moving
5182 project.update(cx, |project, cx| {
5183 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5184 assert_eq!(worktrees.len(), 3);
5185
5186 let first = worktrees[0].read(cx);
5187 let second = worktrees[1].read(cx);
5188 let third = worktrees[2].read(cx);
5189
5190 // check they are now in the right order
5191 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5192 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5193 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5194 });
5195
5196 // move the first worktree to after the third
5197 // [a, b, c] -> [b, c, a]
5198 project
5199 .update(cx, |project, cx| {
5200 let first = worktree_a.read(cx);
5201 let third = worktree_c.read(cx);
5202 project.move_worktree(first.id(), third.id(), cx)
5203 })
5204 .expect("moving first after third");
5205
5206 // check the state after moving
5207 project.update(cx, |project, cx| {
5208 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5209 assert_eq!(worktrees.len(), 3);
5210
5211 let first = worktrees[0].read(cx);
5212 let second = worktrees[1].read(cx);
5213 let third = worktrees[2].read(cx);
5214
5215 // check they are now in the right order
5216 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5217 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5218 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5219 });
5220
5221 // move the third worktree to before the first
5222 // [b, c, a] -> [a, b, c]
5223 project
5224 .update(cx, |project, cx| {
5225 let third = worktree_a.read(cx);
5226 let first = worktree_b.read(cx);
5227 project.move_worktree(third.id(), first.id(), cx)
5228 })
5229 .expect("moving third before first");
5230
5231 // check the state after moving
5232 project.update(cx, |project, cx| {
5233 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5234 assert_eq!(worktrees.len(), 3);
5235
5236 let first = worktrees[0].read(cx);
5237 let second = worktrees[1].read(cx);
5238 let third = worktrees[2].read(cx);
5239
5240 // check they are now in the right order
5241 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5242 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5243 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5244 });
5245}
5246
5247async fn search(
5248 project: &Model<Project>,
5249 query: SearchQuery,
5250 cx: &mut gpui::TestAppContext,
5251) -> Result<HashMap<String, Vec<Range<usize>>>> {
5252 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5253 let mut results = HashMap::default();
5254 while let Some(search_result) = search_rx.next().await {
5255 match search_result {
5256 SearchResult::Buffer { buffer, ranges } => {
5257 results.entry(buffer).or_insert(ranges);
5258 }
5259 SearchResult::LimitReached => {}
5260 }
5261 }
5262 Ok(results
5263 .into_iter()
5264 .map(|(buffer, ranges)| {
5265 buffer.update(cx, |buffer, cx| {
5266 let path = buffer
5267 .file()
5268 .unwrap()
5269 .full_path(cx)
5270 .to_string_lossy()
5271 .to_string();
5272 let ranges = ranges
5273 .into_iter()
5274 .map(|range| range.to_offset(buffer))
5275 .collect::<Vec<_>>();
5276 (path, ranges)
5277 })
5278 })
5279 .collect())
5280}
5281
5282pub fn init_test(cx: &mut gpui::TestAppContext) {
5283 if std::env::var("RUST_LOG").is_ok() {
5284 env_logger::try_init().ok();
5285 }
5286
5287 cx.update(|cx| {
5288 let settings_store = SettingsStore::test(cx);
5289 cx.set_global(settings_store);
5290 release_channel::init(SemanticVersion::default(), cx);
5291 language::init(cx);
5292 Project::init_settings(cx);
5293 });
5294}
5295
5296fn json_lang() -> Arc<Language> {
5297 Arc::new(Language::new(
5298 LanguageConfig {
5299 name: "JSON".into(),
5300 matcher: LanguageMatcher {
5301 path_suffixes: vec!["json".to_string()],
5302 ..Default::default()
5303 },
5304 ..Default::default()
5305 },
5306 None,
5307 ))
5308}
5309
5310fn js_lang() -> Arc<Language> {
5311 Arc::new(Language::new(
5312 LanguageConfig {
5313 name: Arc::from("JavaScript"),
5314 matcher: LanguageMatcher {
5315 path_suffixes: vec!["js".to_string()],
5316 ..Default::default()
5317 },
5318 ..Default::default()
5319 },
5320 None,
5321 ))
5322}
5323
5324fn rust_lang() -> Arc<Language> {
5325 Arc::new(Language::new(
5326 LanguageConfig {
5327 name: "Rust".into(),
5328 matcher: LanguageMatcher {
5329 path_suffixes: vec!["rs".to_string()],
5330 ..Default::default()
5331 },
5332 ..Default::default()
5333 },
5334 Some(tree_sitter_rust::language()),
5335 ))
5336}
5337
5338fn typescript_lang() -> Arc<Language> {
5339 Arc::new(Language::new(
5340 LanguageConfig {
5341 name: "TypeScript".into(),
5342 matcher: LanguageMatcher {
5343 path_suffixes: vec!["ts".to_string()],
5344 ..Default::default()
5345 },
5346 ..Default::default()
5347 },
5348 Some(tree_sitter_typescript::language_typescript()),
5349 ))
5350}
5351
5352fn tsx_lang() -> Arc<Language> {
5353 Arc::new(Language::new(
5354 LanguageConfig {
5355 name: "tsx".into(),
5356 matcher: LanguageMatcher {
5357 path_suffixes: vec!["tsx".to_string()],
5358 ..Default::default()
5359 },
5360 ..Default::default()
5361 },
5362 Some(tree_sitter_typescript::language_tsx()),
5363 ))
5364}
5365
5366fn get_all_tasks(
5367 project: &Model<Project>,
5368 worktree_id: Option<WorktreeId>,
5369 task_context: &TaskContext,
5370 cx: &mut AppContext,
5371) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5372 let resolved_tasks = project.update(cx, |project, cx| {
5373 project
5374 .task_inventory()
5375 .read(cx)
5376 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5377 });
5378
5379 cx.spawn(|_| async move {
5380 let (mut old, new) = resolved_tasks.await;
5381 old.extend(new);
5382 old
5383 })
5384}