1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
53#[cfg(not(windows))]
54#[gpui::test]
55async fn test_symlinks(cx: &mut gpui::TestAppContext) {
56 init_test(cx);
57 cx.executor().allow_parking();
58
59 let dir = temp_tree(json!({
60 "root": {
61 "apple": "",
62 "banana": {
63 "carrot": {
64 "date": "",
65 "endive": "",
66 }
67 },
68 "fennel": {
69 "grape": "",
70 }
71 }
72 }));
73
74 let root_link_path = dir.path().join("root_link");
75 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
76 os::unix::fs::symlink(
77 dir.path().join("root/fennel"),
78 dir.path().join("root/finnochio"),
79 )
80 .unwrap();
81
82 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
83
84 project.update(cx, |project, cx| {
85 let tree = project.worktrees(cx).next().unwrap().read(cx);
86 assert_eq!(tree.file_count(), 5);
87 assert_eq!(
88 tree.inode_for_path("fennel/grape"),
89 tree.inode_for_path("finnochio/grape")
90 );
91 });
92}
93
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Worktree layout: a root-level `.zed` directory provides settings and
    // tasks for the whole tree, while `b/.zed` overrides both for files
    // under `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the local settings/tasks files be observed before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // The source kind identifying tasks loaded from the root tasks.json.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve against the nearest `.zed/settings.json`:
            // `a/a.rs` sees the root settings, `b/b.rs` sees the `b/` override.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks.json files contribute a "cargo check" task, each with its
    // own argument list.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Record the root task as most recently scheduled in the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Replace the root static task source with one fed over a channel,
    // simulating the root tasks.json changing contents on disk.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    // Push the new file contents through the tracked-file channel.
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The updated root task (new args + env) is reflected, while the nested
    // `b/` task is untouched.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
298
#[gpui::test]
// End-to-end exercise of language-server lifecycle management: servers are
// started lazily per language, buffers are routed to matching servers, and
// renames/restarts re-associate buffers with the right server.
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake LSP adapters so we can intercept the traffic each server
    // receives. Each advertises distinct completion trigger characters so the
    // test can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it gets no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic into the renamed buffer so we can later verify that
    // changing its language clears the diagnostics.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not guaranteed, hence set
    // equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
681
#[gpui::test]
// Verifies that `workspace/didChangeWatchedFiles` registrations cause ignored
// directories to be loaded on demand, and that only matching FS events are
// forwarded to the server.
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target/` is gitignored; its contents should not be scanned eagerly.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline, so we can later measure how many directory reads the watch
    // registration triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a glob over `src/`, and a
    // recursive glob inside the ignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone must not synthesize any change events.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
875
876#[gpui::test]
877async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
878 init_test(cx);
879
880 let fs = FakeFs::new(cx.executor());
881 fs.insert_tree(
882 "/dir",
883 json!({
884 "a.rs": "let a = 1;",
885 "b.rs": "let b = 2;"
886 }),
887 )
888 .await;
889
890 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
891
892 let buffer_a = project
893 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
894 .await
895 .unwrap();
896 let buffer_b = project
897 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
898 .await
899 .unwrap();
900
901 project.update(cx, |project, cx| {
902 project
903 .update_diagnostics(
904 LanguageServerId(0),
905 lsp::PublishDiagnosticsParams {
906 uri: Url::from_file_path("/dir/a.rs").unwrap(),
907 version: None,
908 diagnostics: vec![lsp::Diagnostic {
909 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
910 severity: Some(lsp::DiagnosticSeverity::ERROR),
911 message: "error 1".to_string(),
912 ..Default::default()
913 }],
914 },
915 &[],
916 cx,
917 )
918 .unwrap();
919 project
920 .update_diagnostics(
921 LanguageServerId(0),
922 lsp::PublishDiagnosticsParams {
923 uri: Url::from_file_path("/dir/b.rs").unwrap(),
924 version: None,
925 diagnostics: vec![lsp::Diagnostic {
926 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
927 severity: Some(DiagnosticSeverity::WARNING),
928 message: "error 2".to_string(),
929 ..Default::default()
930 }],
931 },
932 &[],
933 cx,
934 )
935 .unwrap();
936 });
937
938 buffer_a.update(cx, |buffer, _| {
939 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
940 assert_eq!(
941 chunks
942 .iter()
943 .map(|(s, d)| (s.as_str(), *d))
944 .collect::<Vec<_>>(),
945 &[
946 ("let ", None),
947 ("a", Some(DiagnosticSeverity::ERROR)),
948 (" = 1;", None),
949 ]
950 );
951 });
952 buffer_b.update(cx, |buffer, _| {
953 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
954 assert_eq!(
955 chunks
956 .iter()
957 .map(|(s, d)| (s.as_str(), *d))
958 .collect::<Vec<_>>(),
959 &[
960 ("let ", None),
961 ("b", Some(DiagnosticSeverity::WARNING)),
962 (" = 2;", None),
963 ]
964 );
965 });
966}
967
968#[gpui::test]
969async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
970 init_test(cx);
971
972 let fs = FakeFs::new(cx.executor());
973 fs.insert_tree(
974 "/root",
975 json!({
976 "dir": {
977 ".git": {
978 "HEAD": "ref: refs/heads/main",
979 },
980 ".gitignore": "b.rs",
981 "a.rs": "let a = 1;",
982 "b.rs": "let b = 2;",
983 },
984 "other.rs": "let b = c;"
985 }),
986 )
987 .await;
988
989 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
990 let (worktree, _) = project
991 .update(cx, |project, cx| {
992 project.find_or_create_worktree("/root/dir", true, cx)
993 })
994 .await
995 .unwrap();
996 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
997
998 let (worktree, _) = project
999 .update(cx, |project, cx| {
1000 project.find_or_create_worktree("/root/other.rs", false, cx)
1001 })
1002 .await
1003 .unwrap();
1004 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1005
1006 let server_id = LanguageServerId(0);
1007 project.update(cx, |project, cx| {
1008 project
1009 .update_diagnostics(
1010 server_id,
1011 lsp::PublishDiagnosticsParams {
1012 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1013 version: None,
1014 diagnostics: vec![lsp::Diagnostic {
1015 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1016 severity: Some(lsp::DiagnosticSeverity::ERROR),
1017 message: "unused variable 'b'".to_string(),
1018 ..Default::default()
1019 }],
1020 },
1021 &[],
1022 cx,
1023 )
1024 .unwrap();
1025 project
1026 .update_diagnostics(
1027 server_id,
1028 lsp::PublishDiagnosticsParams {
1029 uri: Url::from_file_path("/root/other.rs").unwrap(),
1030 version: None,
1031 diagnostics: vec![lsp::Diagnostic {
1032 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1033 severity: Some(lsp::DiagnosticSeverity::ERROR),
1034 message: "unknown variable 'c'".to_string(),
1035 ..Default::default()
1036 }],
1037 },
1038 &[],
1039 cx,
1040 )
1041 .unwrap();
1042 });
1043
1044 let main_ignored_buffer = project
1045 .update(cx, |project, cx| {
1046 project.open_buffer((main_worktree_id, "b.rs"), cx)
1047 })
1048 .await
1049 .unwrap();
1050 main_ignored_buffer.update(cx, |buffer, _| {
1051 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1052 assert_eq!(
1053 chunks
1054 .iter()
1055 .map(|(s, d)| (s.as_str(), *d))
1056 .collect::<Vec<_>>(),
1057 &[
1058 ("let ", None),
1059 ("b", Some(DiagnosticSeverity::ERROR)),
1060 (" = 2;", None),
1061 ],
1062 "Gigitnored buffers should still get in-buffer diagnostics",
1063 );
1064 });
1065 let other_buffer = project
1066 .update(cx, |project, cx| {
1067 project.open_buffer((other_worktree_id, ""), cx)
1068 })
1069 .await
1070 .unwrap();
1071 other_buffer.update(cx, |buffer, _| {
1072 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1073 assert_eq!(
1074 chunks
1075 .iter()
1076 .map(|(s, d)| (s.as_str(), *d))
1077 .collect::<Vec<_>>(),
1078 &[
1079 ("let b = ", None),
1080 ("c", Some(DiagnosticSeverity::ERROR)),
1081 (";", None),
1082 ],
1083 "Buffers from hidden projects should still get in-buffer diagnostics"
1084 );
1085 });
1086
1087 project.update(cx, |project, cx| {
1088 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1089 assert_eq!(
1090 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1091 vec![(
1092 ProjectPath {
1093 worktree_id: main_worktree_id,
1094 path: Arc::from(Path::new("b.rs")),
1095 },
1096 server_id,
1097 DiagnosticSummary {
1098 error_count: 1,
1099 warning_count: 0,
1100 }
1101 )]
1102 );
1103 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1104 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1105 });
1106}
1107
// Verifies that progress reported under the adapter's
// `disk_based_diagnostics_progress_token` is surfaced as project events:
// DiskBasedDiagnosticsStarted/Finished bracket the update, diagnostics
// published in between emit DiagnosticsUpdated for the affected path, and
// re-publishing identical (empty) diagnostics does not emit a second event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe before driving the server so no events are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // A progress token derived from the registered one (here suffixed with
    // "/0") still counts as disk-based diagnostics progress.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics are published for a.rs, which is not open yet (only b.rs
    // was opened above); the project still records them and emits an event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening a.rs now exposes the previously published diagnostic in-buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish is a no-op: after the executor
    // settles, no further event is pending on the stream.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1236
// Restarting a language server while its disk-based diagnostics progress is
// still open must not leave the project stuck in the "updating" state: the
// replacement server (id 1) drives its own Started/Finished events, and once
// it finishes, no server is reported as running disk-based diagnostics, even
// though the old server's progress token was never ended.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    // Subscribe after the restart, so only the new server's events are seen.
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // While the new server's progress is open, it is the only one listed.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            // Typed empty array so `assert_eq!` can infer the element type.
            [] as [language::LanguageServerId; 0]
        );
    });
}
1315
// Restarting a language server clears the diagnostics it previously
// published: both the in-buffer diagnostic entries and the project-wide
// summary are reset once the restart takes effect.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm both the buffer and the
    // project summary reflect the published error.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1394
1395#[gpui::test]
1396async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1397 init_test(cx);
1398
1399 let fs = FakeFs::new(cx.executor());
1400 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1401
1402 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1403 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1404
1405 language_registry.add(rust_lang());
1406 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1407
1408 let buffer = project
1409 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1410 .await
1411 .unwrap();
1412
1413 // Before restarting the server, report diagnostics with an unknown buffer version.
1414 let fake_server = fake_servers.next().await.unwrap();
1415 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1416 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1417 version: Some(10000),
1418 diagnostics: Vec::new(),
1419 });
1420 cx.executor().run_until_parked();
1421
1422 project.update(cx, |project, cx| {
1423 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1424 });
1425 let mut fake_server = fake_servers.next().await.unwrap();
1426 let notification = fake_server
1427 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1428 .await
1429 .text_document;
1430 assert_eq!(notification.version, 0);
1431}
1432
// `cancel_language_server_work_for_buffers` results in a
// window/workDoneProgress/cancel notification, and the cancel that arrives
// at the server targets the work item begun with `cancellable: Some(true)` —
// not the non-cancellable "another-token" work started first.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First: a work item that is explicitly not cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Second: a cancellable work item under the disk-based progress token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // The cancel notification received by the server names the cancellable
    // token only.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1495
1496#[gpui::test]
1497async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1498 init_test(cx);
1499
1500 let fs = FakeFs::new(cx.executor());
1501 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1502 .await;
1503
1504 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1505 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1506
1507 let mut fake_rust_servers = language_registry.register_fake_lsp(
1508 "Rust",
1509 FakeLspAdapter {
1510 name: "rust-lsp",
1511 ..Default::default()
1512 },
1513 );
1514 let mut fake_js_servers = language_registry.register_fake_lsp(
1515 "JavaScript",
1516 FakeLspAdapter {
1517 name: "js-lsp",
1518 ..Default::default()
1519 },
1520 );
1521 language_registry.add(rust_lang());
1522 language_registry.add(js_lang());
1523
1524 let _rs_buffer = project
1525 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1526 .await
1527 .unwrap();
1528 let _js_buffer = project
1529 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1530 .await
1531 .unwrap();
1532
1533 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1534 assert_eq!(
1535 fake_rust_server_1
1536 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1537 .await
1538 .text_document
1539 .uri
1540 .as_str(),
1541 "file:///dir/a.rs"
1542 );
1543
1544 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1545 assert_eq!(
1546 fake_js_server
1547 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1548 .await
1549 .text_document
1550 .uri
1551 .as_str(),
1552 "file:///dir/b.js"
1553 );
1554
1555 // Disable Rust language server, ensuring only that server gets stopped.
1556 cx.update(|cx| {
1557 SettingsStore::update_global(cx, |settings, cx| {
1558 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1559 settings.languages.insert(
1560 "Rust".into(),
1561 LanguageSettingsContent {
1562 enable_language_server: Some(false),
1563 ..Default::default()
1564 },
1565 );
1566 });
1567 })
1568 });
1569 fake_rust_server_1
1570 .receive_notification::<lsp::notification::Exit>()
1571 .await;
1572
1573 // Enable Rust and disable JavaScript language servers, ensuring that the
1574 // former gets started again and that the latter stops.
1575 cx.update(|cx| {
1576 SettingsStore::update_global(cx, |settings, cx| {
1577 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1578 settings.languages.insert(
1579 LanguageName::new("Rust"),
1580 LanguageSettingsContent {
1581 enable_language_server: Some(true),
1582 ..Default::default()
1583 },
1584 );
1585 settings.languages.insert(
1586 LanguageName::new("JavaScript"),
1587 LanguageSettingsContent {
1588 enable_language_server: Some(false),
1589 ..Default::default()
1590 },
1591 );
1592 });
1593 })
1594 });
1595 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1596 assert_eq!(
1597 fake_rust_server_2
1598 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1599 .await
1600 .text_document
1601 .uri
1602 .as_str(),
1603 "file:///dir/a.rs"
1604 );
1605 fake_js_server
1606 .receive_notification::<lsp::notification::Exit>()
1607 .await;
1608}
1609
// Exercises how published diagnostics are translated through buffer edits:
// - diagnostics reported against the older (open-time) document version are
//   moved to their current positions,
// - overlapping error/warning ranges produce correctly nested highlight
//   chunks,
// - disk-based diagnostics arriving for a newer version are re-mapped
//   through all edits made since that version.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shifted by 2 relative to the published positions, matching the
        // "\n\n" inserted at the top. Group ids are assigned per diagnostic in
        // publish order (asserted here: BB=1, CCC=2).
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the highlighted chunks at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both entries start at the same point; note the returned ordering
        // places the wider warning range before the narrower error range.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error severity wins the
        // chunk; the remainder of the warning range keeps warning severity.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The published positions are remapped through the indentation and
        // "xxx" edits made above; results come back sorted by position even
        // though they were published out of order.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1889
1890#[gpui::test]
1891async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1892 init_test(cx);
1893
1894 let text = concat!(
1895 "let one = ;\n", //
1896 "let two = \n",
1897 "let three = 3;\n",
1898 );
1899
1900 let fs = FakeFs::new(cx.executor());
1901 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1902
1903 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1904 let buffer = project
1905 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1906 .await
1907 .unwrap();
1908
1909 project.update(cx, |project, cx| {
1910 project.lsp_store.update(cx, |lsp_store, cx| {
1911 lsp_store
1912 .update_buffer_diagnostics(
1913 &buffer,
1914 LanguageServerId(0),
1915 None,
1916 vec![
1917 DiagnosticEntry {
1918 range: Unclipped(PointUtf16::new(0, 10))
1919 ..Unclipped(PointUtf16::new(0, 10)),
1920 diagnostic: Diagnostic {
1921 severity: DiagnosticSeverity::ERROR,
1922 message: "syntax error 1".to_string(),
1923 ..Default::default()
1924 },
1925 },
1926 DiagnosticEntry {
1927 range: Unclipped(PointUtf16::new(1, 10))
1928 ..Unclipped(PointUtf16::new(1, 10)),
1929 diagnostic: Diagnostic {
1930 severity: DiagnosticSeverity::ERROR,
1931 message: "syntax error 2".to_string(),
1932 ..Default::default()
1933 },
1934 },
1935 ],
1936 cx,
1937 )
1938 .unwrap();
1939 })
1940 });
1941
1942 // An empty range is extended forward to include the following character.
1943 // At the end of a line, an empty range is extended backward to include
1944 // the preceding character.
1945 buffer.update(cx, |buffer, _| {
1946 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1947 assert_eq!(
1948 chunks
1949 .iter()
1950 .map(|(s, d)| (s.as_str(), *d))
1951 .collect::<Vec<_>>(),
1952 &[
1953 ("let one = ", None),
1954 (";", Some(DiagnosticSeverity::ERROR)),
1955 ("\nlet two =", None),
1956 (" ", Some(DiagnosticSeverity::ERROR)),
1957 ("\nlet three = 3;\n", None)
1958 ]
1959 );
1960 });
1961}
1962
1963#[gpui::test]
1964async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1965 init_test(cx);
1966
1967 let fs = FakeFs::new(cx.executor());
1968 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1969 .await;
1970
1971 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1972
1973 project.update(cx, |project, cx| {
1974 project
1975 .update_diagnostic_entries(
1976 LanguageServerId(0),
1977 Path::new("/dir/a.rs").to_owned(),
1978 None,
1979 vec![DiagnosticEntry {
1980 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1981 diagnostic: Diagnostic {
1982 severity: DiagnosticSeverity::ERROR,
1983 is_primary: true,
1984 message: "syntax error a1".to_string(),
1985 ..Default::default()
1986 },
1987 }],
1988 cx,
1989 )
1990 .unwrap();
1991 project
1992 .update_diagnostic_entries(
1993 LanguageServerId(1),
1994 Path::new("/dir/a.rs").to_owned(),
1995 None,
1996 vec![DiagnosticEntry {
1997 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1998 diagnostic: Diagnostic {
1999 severity: DiagnosticSeverity::ERROR,
2000 is_primary: true,
2001 message: "syntax error b1".to_string(),
2002 ..Default::default()
2003 },
2004 }],
2005 cx,
2006 )
2007 .unwrap();
2008
2009 assert_eq!(
2010 project.diagnostic_summary(false, cx),
2011 DiagnosticSummary {
2012 error_count: 2,
2013 warning_count: 0,
2014 }
2015 );
2016 });
2017}
2018
// `edits_from_lsp` must interpret LSP edits against the document version the
// server last saw (the version from DidOpenTextDocument), translating them
// through the buffer edits the user made afterwards. Two distinct LSP edits
// at the same position (the "4000" insertion plus a one-character deletion)
// must compose cleanly.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version number the server associates with the open document;
    // the LSP edits below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edits use coordinates in the original (pre-edit) document; the
    // positions in the ranges below refer to that stale version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits yields the intended result: the user's
    // comments survive, f1 -> f10, f2 -> f200, f3 -> f4000.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2171
// When a server describes a small change as a sprawling diff (replace, two
// insertions at the same point, then a large deletion), `edits_from_lsp`
// must reduce it to a minimal set of edits — asserted below as exactly two:
// the import rewrite and a two-line deletion.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits collapse into just these two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying them yields the merged-imports result.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2280
// Verifies that `edits_from_lsp` tolerates malformed server input: edits out
// of order, an inverted (end-before-start) range, and a range whose end lies
// far past the end of the file. They must still be normalized into the same
// minimal, ordered set of edits as in the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal,
        // ordered pair of edits as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2385
2386fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2387 buffer: &Buffer,
2388 range: Range<T>,
2389) -> Vec<(String, Option<DiagnosticSeverity>)> {
2390 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2391 for chunk in buffer.snapshot().chunks(range, true) {
2392 if chunks.last().map_or(false, |prev_chunk| {
2393 prev_chunk.1 == chunk.diagnostic_severity
2394 }) {
2395 chunks.last_mut().unwrap().0.push_str(chunk.text);
2396 } else {
2397 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2398 }
2399 }
2400 chunks
2401}
2402
// End-to-end go-to-definition: the fake language server returns a location in
// a file outside the visible worktree. The project should open that file in a
// new, invisible worktree, and release that worktree again once the returned
// definition (and its buffer handle) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` lives outside of it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at the definition inside `a.rs`, outside the project.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an *invisible* worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // With the definition dropped, the invisible worktree is released.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: all of the project's worktrees as (abs_path, is_visible) pairs.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2496
// When completion items carry no explicit edit range, the replaced range
// should be inferred from the text preceding the cursor: the word being typed
// in the first scenario, and the partial string contents (excluding the
// closing quote) in the second.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Scenario 1: completing at the end of the identifier fragment "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item provides `insert_text` but no `text_edit` range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character fragment "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Scenario 2: completing inside a string literal, just before the
    // closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // No `insert_text` either: the label itself is used as the new text.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", stopping before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2588
// Completion text received from the server may use `\r` or `\r\n` line
// endings; the resulting `new_text` must be normalized to `\n`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare `\r` and `\r\n` line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both forms of carriage return were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2649
// Full round-trip for a command-backed code action: the action is listed,
// resolved (gaining a command instead of edits), the command is executed, and
// the server's resulting `workspace/applyEdit` request is captured in the
// returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server supports `codeAction/resolve`.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Insert "X" at the start of `a.ts` via `workspace/applyEdit`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2782
2783#[gpui::test(iterations = 10)]
2784async fn test_save_file(cx: &mut gpui::TestAppContext) {
2785 init_test(cx);
2786
2787 let fs = FakeFs::new(cx.executor());
2788 fs.insert_tree(
2789 "/dir",
2790 json!({
2791 "file1": "the old contents",
2792 }),
2793 )
2794 .await;
2795
2796 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2797 let buffer = project
2798 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2799 .await
2800 .unwrap();
2801 buffer.update(cx, |buffer, cx| {
2802 assert_eq!(buffer.text(), "the old contents");
2803 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2804 });
2805
2806 project
2807 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2808 .await
2809 .unwrap();
2810
2811 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2812 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2813}
2814
2815#[gpui::test(iterations = 30)]
2816async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2817 init_test(cx);
2818
2819 let fs = FakeFs::new(cx.executor().clone());
2820 fs.insert_tree(
2821 "/dir",
2822 json!({
2823 "file1": "the original contents",
2824 }),
2825 )
2826 .await;
2827
2828 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2829 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2830 let buffer = project
2831 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2832 .await
2833 .unwrap();
2834
2835 // Simulate buffer diffs being slow, so that they don't complete before
2836 // the next file change occurs.
2837 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2838
2839 // Change the buffer's file on disk, and then wait for the file change
2840 // to be detected by the worktree, so that the buffer starts reloading.
2841 fs.save(
2842 "/dir/file1".as_ref(),
2843 &"the first contents".into(),
2844 Default::default(),
2845 )
2846 .await
2847 .unwrap();
2848 worktree.next_event(cx).await;
2849
2850 // Change the buffer's file again. Depending on the random seed, the
2851 // previous file change may still be in progress.
2852 fs.save(
2853 "/dir/file1".as_ref(),
2854 &"the second contents".into(),
2855 Default::default(),
2856 )
2857 .await
2858 .unwrap();
2859 worktree.next_event(cx).await;
2860
2861 cx.executor().run_until_parked();
2862 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2863 buffer.read_with(cx, |buffer, _| {
2864 assert_eq!(buffer.text(), on_disk_text);
2865 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2866 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2867 });
2868}
2869
2870#[gpui::test(iterations = 30)]
2871async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2872 init_test(cx);
2873
2874 let fs = FakeFs::new(cx.executor().clone());
2875 fs.insert_tree(
2876 "/dir",
2877 json!({
2878 "file1": "the original contents",
2879 }),
2880 )
2881 .await;
2882
2883 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2884 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2885 let buffer = project
2886 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2887 .await
2888 .unwrap();
2889
2890 // Simulate buffer diffs being slow, so that they don't complete before
2891 // the next file change occurs.
2892 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2893
2894 // Change the buffer's file on disk, and then wait for the file change
2895 // to be detected by the worktree, so that the buffer starts reloading.
2896 fs.save(
2897 "/dir/file1".as_ref(),
2898 &"the first contents".into(),
2899 Default::default(),
2900 )
2901 .await
2902 .unwrap();
2903 worktree.next_event(cx).await;
2904
2905 cx.executor()
2906 .spawn(cx.executor().simulate_random_delay())
2907 .await;
2908
2909 // Perform a noop edit, causing the buffer's version to increase.
2910 buffer.update(cx, |buffer, cx| {
2911 buffer.edit([(0..0, " ")], None, cx);
2912 buffer.undo(cx);
2913 });
2914
2915 cx.executor().run_until_parked();
2916 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2917 buffer.read_with(cx, |buffer, _| {
2918 let buffer_text = buffer.text();
2919 if buffer_text == on_disk_text {
2920 assert!(
2921 !buffer.is_dirty() && !buffer.has_conflict(),
2922 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2923 );
2924 }
2925 // If the file change occurred while the buffer was processing the first
2926 // change, the buffer will be in a conflicting state.
2927 else {
2928 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2929 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2930 }
2931 });
2932}
2933
2934#[gpui::test]
2935async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2936 init_test(cx);
2937
2938 let fs = FakeFs::new(cx.executor());
2939 fs.insert_tree(
2940 "/dir",
2941 json!({
2942 "file1": "the old contents",
2943 }),
2944 )
2945 .await;
2946
2947 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2948 let buffer = project
2949 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2950 .await
2951 .unwrap();
2952 buffer.update(cx, |buffer, cx| {
2953 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2954 });
2955
2956 project
2957 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2958 .await
2959 .unwrap();
2960
2961 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2962 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2963}
2964
// "Save as" on an untitled buffer: the buffer gains a file, becomes clean,
// is re-assigned a language based on the new extension, and subsequent opens
// of the new path resolve to the same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as dirty Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    // Save it under a `.rs` path inside the worktree.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // The buffer now has a file, is clean, and was re-languaged to Rust.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the new path yields the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3016
// Uses a real filesystem to rename/delete files and directories, then checks
// that (1) open buffers keep their identity and track the renamed paths,
// (2) worktree entry ids survive renames, and (3) a remote replica of the
// worktree converges to the same state after applying the observed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS test: filesystem events arrive on background threads.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; only the deleted
    // file's buffer is marked as deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3164
// Renaming a directory through the project must preserve the entry ids of the
// directory and the files inside it, and must not dirty buffers that were
// opened under the old path.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory `a` to `b` through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are unchanged, and the open buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3216
3217#[gpui::test]
3218async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3219 init_test(cx);
3220
3221 let fs = FakeFs::new(cx.executor());
3222 fs.insert_tree(
3223 "/dir",
3224 json!({
3225 "a.txt": "a-contents",
3226 "b.txt": "b-contents",
3227 }),
3228 )
3229 .await;
3230
3231 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3232
3233 // Spawn multiple tasks to open paths, repeating some paths.
3234 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3235 (
3236 p.open_local_buffer("/dir/a.txt", cx),
3237 p.open_local_buffer("/dir/b.txt", cx),
3238 p.open_local_buffer("/dir/a.txt", cx),
3239 )
3240 });
3241
3242 let buffer_a_1 = buffer_a_1.await.unwrap();
3243 let buffer_a_2 = buffer_a_2.await.unwrap();
3244 let buffer_b = buffer_b.await.unwrap();
3245 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3246 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3247
3248 // There is only one buffer per path.
3249 let buffer_a_id = buffer_a_1.entity_id();
3250 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3251
3252 // Open the same path again while it is still open.
3253 drop(buffer_a_1);
3254 let buffer_a_3 = project
3255 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3256 .await
3257 .unwrap();
3258
3259 // There's still only one buffer per path.
3260 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3261}
3262
// Exercises the buffer dirty-state machine and the exact event sequences it
// emits: editing dirties (Edited + DirtyChanged), saving cleans (Saved),
// restoring the saved text cleans again, and deleting the file on disk
// dirties a clean buffer but not an already-dirty one.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every non-Operation event the buffer emits.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first of the two consecutive edits flips the dirty flag.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3409
3410#[gpui::test]
3411async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3412 init_test(cx);
3413
3414 let initial_contents = "aaa\nbbbbb\nc\n";
3415 let fs = FakeFs::new(cx.executor());
3416 fs.insert_tree(
3417 "/dir",
3418 json!({
3419 "the-file": initial_contents,
3420 }),
3421 )
3422 .await;
3423 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3424 let buffer = project
3425 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3426 .await
3427 .unwrap();
3428
3429 let anchors = (0..3)
3430 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3431 .collect::<Vec<_>>();
3432
3433 // Change the file on disk, adding two new lines of text, and removing
3434 // one line.
3435 buffer.update(cx, |buffer, _| {
3436 assert!(!buffer.is_dirty());
3437 assert!(!buffer.has_conflict());
3438 });
3439 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3440 fs.save(
3441 "/dir/the-file".as_ref(),
3442 &new_contents.into(),
3443 LineEnding::Unix,
3444 )
3445 .await
3446 .unwrap();
3447
3448 // Because the buffer was not modified, it is reloaded from disk. Its
3449 // contents are edited according to the diff between the old and new
3450 // file contents.
3451 cx.executor().run_until_parked();
3452 buffer.update(cx, |buffer, _| {
3453 assert_eq!(buffer.text(), new_contents);
3454 assert!(!buffer.is_dirty());
3455 assert!(!buffer.has_conflict());
3456
3457 let anchor_positions = anchors
3458 .iter()
3459 .map(|anchor| anchor.to_point(&*buffer))
3460 .collect::<Vec<_>>();
3461 assert_eq!(
3462 anchor_positions,
3463 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3464 );
3465 });
3466
3467 // Modify the buffer
3468 buffer.update(cx, |buffer, cx| {
3469 buffer.edit([(0..0, " ")], None, cx);
3470 assert!(buffer.is_dirty());
3471 assert!(!buffer.has_conflict());
3472 });
3473
3474 // Change the file on disk again, adding blank lines to the beginning.
3475 fs.save(
3476 "/dir/the-file".as_ref(),
3477 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3478 LineEnding::Unix,
3479 )
3480 .await
3481 .unwrap();
3482
3483 // Because the buffer is modified, it doesn't reload from disk, but is
3484 // marked as having a conflict.
3485 cx.executor().run_until_parked();
3486 buffer.update(cx, |buffer, _| {
3487 assert!(buffer.has_conflict());
3488 });
3489}
3490
3491#[gpui::test]
3492async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3493 init_test(cx);
3494
3495 let fs = FakeFs::new(cx.executor());
3496 fs.insert_tree(
3497 "/dir",
3498 json!({
3499 "file1": "a\nb\nc\n",
3500 "file2": "one\r\ntwo\r\nthree\r\n",
3501 }),
3502 )
3503 .await;
3504
3505 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3506 let buffer1 = project
3507 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3508 .await
3509 .unwrap();
3510 let buffer2 = project
3511 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3512 .await
3513 .unwrap();
3514
3515 buffer1.update(cx, |buffer, _| {
3516 assert_eq!(buffer.text(), "a\nb\nc\n");
3517 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3518 });
3519 buffer2.update(cx, |buffer, _| {
3520 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3521 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3522 });
3523
3524 // Change a file's line endings on disk from unix to windows. The buffer's
3525 // state updates correctly.
3526 fs.save(
3527 "/dir/file1".as_ref(),
3528 &"aaa\nb\nc\n".into(),
3529 LineEnding::Windows,
3530 )
3531 .await
3532 .unwrap();
3533 cx.executor().run_until_parked();
3534 buffer1.update(cx, |buffer, _| {
3535 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3536 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3537 });
3538
3539 // Save a file with windows line endings. The file is written correctly.
3540 buffer2.update(cx, |buffer, cx| {
3541 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3542 });
3543 project
3544 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3545 .await
3546 .unwrap();
3547 assert_eq!(
3548 fs.load("/dir/file2".as_ref()).await.unwrap(),
3549 "one\r\ntwo\r\nthree\r\nfour\r\n",
3550 );
3551}
3552
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies diagnostic grouping: a primary diagnostic and the hint
    // diagnostics linked to it via `related_information` end up sharing a
    // single `group_id`, and `diagnostic_group` returns every member of a
    // group ordered by range.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // The published payload contains two logical groups:
    // - "error 1" (WARNING) with one related hint at the same range.
    // - "error 2" (ERROR) with two related hints on line 1.
    // Each hint is also published as a standalone HINT diagnostic whose own
    // `related_information` points back at its primary.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the payload as if it came from language server 0.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by range. The expected values show that "error 2"
    // and its hints were assigned group 0, "error 1" and its hint group 1,
    // and that hints are non-primary members of their group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 contains the two hints and the primary "error 2", in range
    // order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 contains the primary "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3794
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the two-phase LSP rename flow against a fake language server:
    // `prepare_rename` resolves the symbol's range under the cursor, then
    // `perform_rename` applies a workspace edit spanning multiple files.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // with prepare-rename enabled.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake server.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE"). The server answers
    // with the full range of the symbol, which the project converts back to
    // buffer offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform_rename to "THREE". The fake server replies with a
    // workspace edit touching both one.rs (the definition) and two.rs (the
    // two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();

    // The returned transaction maps each edited buffer to its undo data; both
    // buffers now reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3928
3929#[gpui::test]
3930async fn test_search(cx: &mut gpui::TestAppContext) {
3931 init_test(cx);
3932
3933 let fs = FakeFs::new(cx.executor());
3934 fs.insert_tree(
3935 "/dir",
3936 json!({
3937 "one.rs": "const ONE: usize = 1;",
3938 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3939 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3940 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3941 }),
3942 )
3943 .await;
3944 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3945 assert_eq!(
3946 search(
3947 &project,
3948 SearchQuery::text(
3949 "TWO",
3950 false,
3951 true,
3952 false,
3953 Default::default(),
3954 Default::default(),
3955 None
3956 )
3957 .unwrap(),
3958 cx
3959 )
3960 .await
3961 .unwrap(),
3962 HashMap::from_iter([
3963 ("dir/two.rs".to_string(), vec![6..9]),
3964 ("dir/three.rs".to_string(), vec![37..40])
3965 ])
3966 );
3967
3968 let buffer_4 = project
3969 .update(cx, |project, cx| {
3970 project.open_local_buffer("/dir/four.rs", cx)
3971 })
3972 .await
3973 .unwrap();
3974 buffer_4.update(cx, |buffer, cx| {
3975 let text = "two::TWO";
3976 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3977 });
3978
3979 assert_eq!(
3980 search(
3981 &project,
3982 SearchQuery::text(
3983 "TWO",
3984 false,
3985 true,
3986 false,
3987 Default::default(),
3988 Default::default(),
3989 None,
3990 )
3991 .unwrap(),
3992 cx
3993 )
3994 .await
3995 .unwrap(),
3996 HashMap::from_iter([
3997 ("dir/two.rs".to_string(), vec![6..9]),
3998 ("dir/three.rs".to_string(), vec![37..40]),
3999 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4000 ])
4001 );
4002}
4003
4004#[gpui::test]
4005async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4006 init_test(cx);
4007
4008 let search_query = "file";
4009
4010 let fs = FakeFs::new(cx.executor());
4011 fs.insert_tree(
4012 "/dir",
4013 json!({
4014 "one.rs": r#"// Rust file one"#,
4015 "one.ts": r#"// TypeScript file one"#,
4016 "two.rs": r#"// Rust file two"#,
4017 "two.ts": r#"// TypeScript file two"#,
4018 }),
4019 )
4020 .await;
4021 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4022
4023 assert!(
4024 search(
4025 &project,
4026 SearchQuery::text(
4027 search_query,
4028 false,
4029 true,
4030 false,
4031 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4032 Default::default(),
4033 None
4034 )
4035 .unwrap(),
4036 cx
4037 )
4038 .await
4039 .unwrap()
4040 .is_empty(),
4041 "If no inclusions match, no files should be returned"
4042 );
4043
4044 assert_eq!(
4045 search(
4046 &project,
4047 SearchQuery::text(
4048 search_query,
4049 false,
4050 true,
4051 false,
4052 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4053 Default::default(),
4054 None
4055 )
4056 .unwrap(),
4057 cx
4058 )
4059 .await
4060 .unwrap(),
4061 HashMap::from_iter([
4062 ("dir/one.rs".to_string(), vec![8..12]),
4063 ("dir/two.rs".to_string(), vec![8..12]),
4064 ]),
4065 "Rust only search should give only Rust files"
4066 );
4067
4068 assert_eq!(
4069 search(
4070 &project,
4071 SearchQuery::text(
4072 search_query,
4073 false,
4074 true,
4075 false,
4076
4077 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4078
4079 Default::default(),
4080 None,
4081 ).unwrap(),
4082 cx
4083 )
4084 .await
4085 .unwrap(),
4086 HashMap::from_iter([
4087 ("dir/one.ts".to_string(), vec![14..18]),
4088 ("dir/two.ts".to_string(), vec![14..18]),
4089 ]),
4090 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4091 );
4092
4093 assert_eq!(
4094 search(
4095 &project,
4096 SearchQuery::text(
4097 search_query,
4098 false,
4099 true,
4100 false,
4101
4102 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4103
4104 Default::default(),
4105 None,
4106 ).unwrap(),
4107 cx
4108 )
4109 .await
4110 .unwrap(),
4111 HashMap::from_iter([
4112 ("dir/two.ts".to_string(), vec![14..18]),
4113 ("dir/one.rs".to_string(), vec![8..12]),
4114 ("dir/one.ts".to_string(), vec![14..18]),
4115 ("dir/two.rs".to_string(), vec![8..12]),
4116 ]),
4117 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4118 );
4119}
4120
4121#[gpui::test]
4122async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4123 init_test(cx);
4124
4125 let search_query = "file";
4126
4127 let fs = FakeFs::new(cx.executor());
4128 fs.insert_tree(
4129 "/dir",
4130 json!({
4131 "one.rs": r#"// Rust file one"#,
4132 "one.ts": r#"// TypeScript file one"#,
4133 "two.rs": r#"// Rust file two"#,
4134 "two.ts": r#"// TypeScript file two"#,
4135 }),
4136 )
4137 .await;
4138 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4139
4140 assert_eq!(
4141 search(
4142 &project,
4143 SearchQuery::text(
4144 search_query,
4145 false,
4146 true,
4147 false,
4148 Default::default(),
4149 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4150 None,
4151 )
4152 .unwrap(),
4153 cx
4154 )
4155 .await
4156 .unwrap(),
4157 HashMap::from_iter([
4158 ("dir/one.rs".to_string(), vec![8..12]),
4159 ("dir/one.ts".to_string(), vec![14..18]),
4160 ("dir/two.rs".to_string(), vec![8..12]),
4161 ("dir/two.ts".to_string(), vec![14..18]),
4162 ]),
4163 "If no exclusions match, all files should be returned"
4164 );
4165
4166 assert_eq!(
4167 search(
4168 &project,
4169 SearchQuery::text(
4170 search_query,
4171 false,
4172 true,
4173 false,
4174 Default::default(),
4175 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4176 None,
4177 )
4178 .unwrap(),
4179 cx
4180 )
4181 .await
4182 .unwrap(),
4183 HashMap::from_iter([
4184 ("dir/one.ts".to_string(), vec![14..18]),
4185 ("dir/two.ts".to_string(), vec![14..18]),
4186 ]),
4187 "Rust exclusion search should give only TypeScript files"
4188 );
4189
4190 assert_eq!(
4191 search(
4192 &project,
4193 SearchQuery::text(
4194 search_query,
4195 false,
4196 true,
4197 false,
4198 Default::default(),
4199 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4200 None,
4201 ).unwrap(),
4202 cx
4203 )
4204 .await
4205 .unwrap(),
4206 HashMap::from_iter([
4207 ("dir/one.rs".to_string(), vec![8..12]),
4208 ("dir/two.rs".to_string(), vec![8..12]),
4209 ]),
4210 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4211 );
4212
4213 assert!(
4214 search(
4215 &project,
4216 SearchQuery::text(
4217 search_query,
4218 false,
4219 true,
4220 false,
4221 Default::default(),
4222
4223 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4224 None,
4225
4226 ).unwrap(),
4227 cx
4228 )
4229 .await
4230 .unwrap().is_empty(),
4231 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4232 );
4233}
4234
4235#[gpui::test]
4236async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4237 init_test(cx);
4238
4239 let search_query = "file";
4240
4241 let fs = FakeFs::new(cx.executor());
4242 fs.insert_tree(
4243 "/dir",
4244 json!({
4245 "one.rs": r#"// Rust file one"#,
4246 "one.ts": r#"// TypeScript file one"#,
4247 "two.rs": r#"// Rust file two"#,
4248 "two.ts": r#"// TypeScript file two"#,
4249 }),
4250 )
4251 .await;
4252 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4253
4254 assert!(
4255 search(
4256 &project,
4257 SearchQuery::text(
4258 search_query,
4259 false,
4260 true,
4261 false,
4262 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4263 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4264 None,
4265 )
4266 .unwrap(),
4267 cx
4268 )
4269 .await
4270 .unwrap()
4271 .is_empty(),
4272 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4273 );
4274
4275 assert!(
4276 search(
4277 &project,
4278 SearchQuery::text(
4279 search_query,
4280 false,
4281 true,
4282 false,
4283 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4284 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4285 None,
4286 ).unwrap(),
4287 cx
4288 )
4289 .await
4290 .unwrap()
4291 .is_empty(),
4292 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4293 );
4294
4295 assert!(
4296 search(
4297 &project,
4298 SearchQuery::text(
4299 search_query,
4300 false,
4301 true,
4302 false,
4303 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4304 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4305 None,
4306 )
4307 .unwrap(),
4308 cx
4309 )
4310 .await
4311 .unwrap()
4312 .is_empty(),
4313 "Non-matching inclusions and exclusions should not change that."
4314 );
4315
4316 assert_eq!(
4317 search(
4318 &project,
4319 SearchQuery::text(
4320 search_query,
4321 false,
4322 true,
4323 false,
4324 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4325 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4326 None,
4327 )
4328 .unwrap(),
4329 cx
4330 )
4331 .await
4332 .unwrap(),
4333 HashMap::from_iter([
4334 ("dir/one.ts".to_string(), vec![14..18]),
4335 ("dir/two.ts".to_string(), vec![14..18]),
4336 ]),
4337 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4338 );
4339}
4340
4341#[gpui::test]
4342async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4343 init_test(cx);
4344
4345 let fs = FakeFs::new(cx.executor());
4346 fs.insert_tree(
4347 "/worktree-a",
4348 json!({
4349 "haystack.rs": r#"// NEEDLE"#,
4350 "haystack.ts": r#"// NEEDLE"#,
4351 }),
4352 )
4353 .await;
4354 fs.insert_tree(
4355 "/worktree-b",
4356 json!({
4357 "haystack.rs": r#"// NEEDLE"#,
4358 "haystack.ts": r#"// NEEDLE"#,
4359 }),
4360 )
4361 .await;
4362
4363 let project = Project::test(
4364 fs.clone(),
4365 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4366 cx,
4367 )
4368 .await;
4369
4370 assert_eq!(
4371 search(
4372 &project,
4373 SearchQuery::text(
4374 "NEEDLE",
4375 false,
4376 true,
4377 false,
4378 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4379 Default::default(),
4380 None,
4381 )
4382 .unwrap(),
4383 cx
4384 )
4385 .await
4386 .unwrap(),
4387 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4388 "should only return results from included worktree"
4389 );
4390 assert_eq!(
4391 search(
4392 &project,
4393 SearchQuery::text(
4394 "NEEDLE",
4395 false,
4396 true,
4397 false,
4398 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4399 Default::default(),
4400 None,
4401 )
4402 .unwrap(),
4403 cx
4404 )
4405 .await
4406 .unwrap(),
4407 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4408 "should only return results from included worktree"
4409 );
4410
4411 assert_eq!(
4412 search(
4413 &project,
4414 SearchQuery::text(
4415 "NEEDLE",
4416 false,
4417 true,
4418 false,
4419 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4420 Default::default(),
4421 None,
4422 )
4423 .unwrap(),
4424 cx
4425 )
4426 .await
4427 .unwrap(),
4428 HashMap::from_iter([
4429 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4430 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4431 ]),
4432 "should return results from both worktrees"
4433 );
4434}
4435
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies that project search skips gitignored paths by default, can
    // opt in to searching them, and still honors include/exclude matchers
    // while doing so.
    init_test(cx);

    // `target` and `node_modules` are gitignored; every file contains "key".
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default search: the fourth argument (include_ignored) is false, so
    // only the non-ignored top-level package.json is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is constructed for each query below —
    // presumably so worktree scan state doesn't carry over; confirm.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // With include_ignored == true, files inside gitignored directories are
    // searched as well.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude matchers still apply inside ignored directories.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4552
4553#[gpui::test]
4554async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
4555 init_test(cx);
4556
4557 let fs = FakeFs::new(cx.background_executor.clone());
4558 fs.insert_tree(
4559 "/dir",
4560 json!({
4561 ".git": {},
4562 ".gitignore": "**/target\n/node_modules\n",
4563 "aaa.txt": "key:value",
4564 "bbb": {
4565 "index.txt": "index_key:index_value"
4566 },
4567 "node_modules": {
4568 "10 eleven": "key",
4569 "1 two": "key"
4570 },
4571 }),
4572 )
4573 .await;
4574 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4575
4576 let mut search = project.update(cx, |project, cx| {
4577 project.search(
4578 SearchQuery::text(
4579 "key",
4580 false,
4581 false,
4582 true,
4583 Default::default(),
4584 Default::default(),
4585 None,
4586 )
4587 .unwrap(),
4588 cx,
4589 )
4590 });
4591
4592 fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
4593 match search_result.unwrap() {
4594 SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
4595 buffer.file().unwrap().path().to_string_lossy().to_string()
4596 }),
4597 _ => panic!("Expected buffer"),
4598 }
4599 }
4600
4601 assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
4602 assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
4603 assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
4604 assert_eq!(file_name(search.next().await, cx), "aaa.txt");
4605 assert!(search.next().await.is_none())
4606}
4607
4608#[gpui::test]
4609async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4610 init_test(cx);
4611
4612 let fs = FakeFs::new(cx.executor().clone());
4613 fs.insert_tree(
4614 "/one/two",
4615 json!({
4616 "three": {
4617 "a.txt": "",
4618 "four": {}
4619 },
4620 "c.rs": ""
4621 }),
4622 )
4623 .await;
4624
4625 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4626 project
4627 .update(cx, |project, cx| {
4628 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4629 project.create_entry((id, "b.."), true, cx)
4630 })
4631 .await
4632 .unwrap()
4633 .to_included()
4634 .unwrap();
4635
4636 // Can't create paths outside the project
4637 let result = project
4638 .update(cx, |project, cx| {
4639 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4640 project.create_entry((id, "../../boop"), true, cx)
4641 })
4642 .await;
4643 assert!(result.is_err());
4644
4645 // Can't create paths with '..'
4646 let result = project
4647 .update(cx, |project, cx| {
4648 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4649 project.create_entry((id, "four/../beep"), true, cx)
4650 })
4651 .await;
4652 assert!(result.is_err());
4653
4654 assert_eq!(
4655 fs.paths(true),
4656 vec![
4657 PathBuf::from("/"),
4658 PathBuf::from("/one"),
4659 PathBuf::from("/one/two"),
4660 PathBuf::from("/one/two/c.rs"),
4661 PathBuf::from("/one/two/three"),
4662 PathBuf::from("/one/two/three/a.txt"),
4663 PathBuf::from("/one/two/three/b.."),
4664 PathBuf::from("/one/two/three/four"),
4665 ]
4666 );
4667
4668 // And we cannot open buffers with '..'
4669 let result = project
4670 .update(cx, |project, cx| {
4671 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4672 project.open_buffer((id, "../c.rs"), cx)
4673 })
4674 .await;
4675 assert!(result.is_err())
4676}
4677
4678#[gpui::test]
4679async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4680 init_test(cx);
4681
4682 let fs = FakeFs::new(cx.executor());
4683 fs.insert_tree(
4684 "/dir",
4685 json!({
4686 "a.tsx": "a",
4687 }),
4688 )
4689 .await;
4690
4691 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4692
4693 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4694 language_registry.add(tsx_lang());
4695 let language_server_names = [
4696 "TypeScriptServer",
4697 "TailwindServer",
4698 "ESLintServer",
4699 "NoHoverCapabilitiesServer",
4700 ];
4701 let mut language_servers = [
4702 language_registry.register_fake_lsp(
4703 "tsx",
4704 FakeLspAdapter {
4705 name: language_server_names[0],
4706 capabilities: lsp::ServerCapabilities {
4707 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4708 ..lsp::ServerCapabilities::default()
4709 },
4710 ..FakeLspAdapter::default()
4711 },
4712 ),
4713 language_registry.register_fake_lsp(
4714 "tsx",
4715 FakeLspAdapter {
4716 name: language_server_names[1],
4717 capabilities: lsp::ServerCapabilities {
4718 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4719 ..lsp::ServerCapabilities::default()
4720 },
4721 ..FakeLspAdapter::default()
4722 },
4723 ),
4724 language_registry.register_fake_lsp(
4725 "tsx",
4726 FakeLspAdapter {
4727 name: language_server_names[2],
4728 capabilities: lsp::ServerCapabilities {
4729 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4730 ..lsp::ServerCapabilities::default()
4731 },
4732 ..FakeLspAdapter::default()
4733 },
4734 ),
4735 language_registry.register_fake_lsp(
4736 "tsx",
4737 FakeLspAdapter {
4738 name: language_server_names[3],
4739 capabilities: lsp::ServerCapabilities {
4740 hover_provider: None,
4741 ..lsp::ServerCapabilities::default()
4742 },
4743 ..FakeLspAdapter::default()
4744 },
4745 ),
4746 ];
4747
4748 let buffer = project
4749 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4750 .await
4751 .unwrap();
4752 cx.executor().run_until_parked();
4753
4754 let mut servers_with_hover_requests = HashMap::default();
4755 for i in 0..language_server_names.len() {
4756 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
4757 panic!(
4758 "Failed to get language server #{i} with name {}",
4759 &language_server_names[i]
4760 )
4761 });
4762 let new_server_name = new_server.server.name();
4763 assert!(
4764 !servers_with_hover_requests.contains_key(new_server_name),
4765 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4766 );
4767 let new_server_name = new_server_name.to_string();
4768 match new_server_name.as_str() {
4769 "TailwindServer" | "TypeScriptServer" => {
4770 servers_with_hover_requests.insert(
4771 new_server_name.clone(),
4772 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4773 let name = new_server_name.clone();
4774 async move {
4775 Ok(Some(lsp::Hover {
4776 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4777 format!("{name} hover"),
4778 )),
4779 range: None,
4780 }))
4781 }
4782 }),
4783 );
4784 }
4785 "ESLintServer" => {
4786 servers_with_hover_requests.insert(
4787 new_server_name,
4788 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4789 |_, _| async move { Ok(None) },
4790 ),
4791 );
4792 }
4793 "NoHoverCapabilitiesServer" => {
4794 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4795 |_, _| async move {
4796 panic!(
4797 "Should not call for hovers server with no corresponding capabilities"
4798 )
4799 },
4800 );
4801 }
4802 unexpected => panic!("Unexpected server name: {unexpected}"),
4803 }
4804 }
4805
4806 let hover_task = project.update(cx, |project, cx| {
4807 project.hover(&buffer, Point::new(0, 0), cx)
4808 });
4809 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4810 |mut hover_request| async move {
4811 hover_request
4812 .next()
4813 .await
4814 .expect("All hover requests should have been triggered")
4815 },
4816 ))
4817 .await;
4818 assert_eq!(
4819 vec!["TailwindServer hover", "TypeScriptServer hover"],
4820 hover_task
4821 .await
4822 .into_iter()
4823 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4824 .sorted()
4825 .collect::<Vec<_>>(),
4826 "Should receive hover responses from all related servers with hover capabilities"
4827 );
4828}
4829
4830#[gpui::test]
4831async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4832 init_test(cx);
4833
4834 let fs = FakeFs::new(cx.executor());
4835 fs.insert_tree(
4836 "/dir",
4837 json!({
4838 "a.ts": "a",
4839 }),
4840 )
4841 .await;
4842
4843 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4844
4845 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4846 language_registry.add(typescript_lang());
4847 let mut fake_language_servers = language_registry.register_fake_lsp(
4848 "TypeScript",
4849 FakeLspAdapter {
4850 capabilities: lsp::ServerCapabilities {
4851 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4852 ..lsp::ServerCapabilities::default()
4853 },
4854 ..FakeLspAdapter::default()
4855 },
4856 );
4857
4858 let buffer = project
4859 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4860 .await
4861 .unwrap();
4862 cx.executor().run_until_parked();
4863
4864 let fake_server = fake_language_servers
4865 .next()
4866 .await
4867 .expect("failed to get the language server");
4868
4869 let mut request_handled =
4870 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4871 Ok(Some(lsp::Hover {
4872 contents: lsp::HoverContents::Array(vec![
4873 lsp::MarkedString::String("".to_string()),
4874 lsp::MarkedString::String(" ".to_string()),
4875 lsp::MarkedString::String("\n\n\n".to_string()),
4876 ]),
4877 range: None,
4878 }))
4879 });
4880
4881 let hover_task = project.update(cx, |project, cx| {
4882 project.hover(&buffer, Point::new(0, 0), cx)
4883 });
4884 let () = request_handled
4885 .next()
4886 .await
4887 .expect("All hover requests should have been triggered");
4888 assert_eq!(
4889 Vec::<String>::new(),
4890 hover_task
4891 .await
4892 .into_iter()
4893 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4894 .sorted()
4895 .collect::<Vec<_>>(),
4896 "Empty hover parts should be ignored"
4897 );
4898}
4899
4900#[gpui::test]
4901async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4902 init_test(cx);
4903
4904 let fs = FakeFs::new(cx.executor());
4905 fs.insert_tree(
4906 "/dir",
4907 json!({
4908 "a.tsx": "a",
4909 }),
4910 )
4911 .await;
4912
4913 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4914
4915 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4916 language_registry.add(tsx_lang());
4917 let language_server_names = [
4918 "TypeScriptServer",
4919 "TailwindServer",
4920 "ESLintServer",
4921 "NoActionsCapabilitiesServer",
4922 ];
4923
4924 let mut language_server_rxs = [
4925 language_registry.register_fake_lsp(
4926 "tsx",
4927 FakeLspAdapter {
4928 name: language_server_names[0],
4929 capabilities: lsp::ServerCapabilities {
4930 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4931 ..lsp::ServerCapabilities::default()
4932 },
4933 ..FakeLspAdapter::default()
4934 },
4935 ),
4936 language_registry.register_fake_lsp(
4937 "tsx",
4938 FakeLspAdapter {
4939 name: language_server_names[1],
4940 capabilities: lsp::ServerCapabilities {
4941 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4942 ..lsp::ServerCapabilities::default()
4943 },
4944 ..FakeLspAdapter::default()
4945 },
4946 ),
4947 language_registry.register_fake_lsp(
4948 "tsx",
4949 FakeLspAdapter {
4950 name: language_server_names[2],
4951 capabilities: lsp::ServerCapabilities {
4952 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4953 ..lsp::ServerCapabilities::default()
4954 },
4955 ..FakeLspAdapter::default()
4956 },
4957 ),
4958 language_registry.register_fake_lsp(
4959 "tsx",
4960 FakeLspAdapter {
4961 name: language_server_names[3],
4962 capabilities: lsp::ServerCapabilities {
4963 code_action_provider: None,
4964 ..lsp::ServerCapabilities::default()
4965 },
4966 ..FakeLspAdapter::default()
4967 },
4968 ),
4969 ];
4970
4971 let buffer = project
4972 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4973 .await
4974 .unwrap();
4975 cx.executor().run_until_parked();
4976
4977 let mut servers_with_actions_requests = HashMap::default();
4978 for i in 0..language_server_names.len() {
4979 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
4980 panic!(
4981 "Failed to get language server #{i} with name {}",
4982 &language_server_names[i]
4983 )
4984 });
4985 let new_server_name = new_server.server.name();
4986
4987 assert!(
4988 !servers_with_actions_requests.contains_key(new_server_name),
4989 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4990 );
4991 let new_server_name = new_server_name.to_string();
4992 match new_server_name.as_str() {
4993 "TailwindServer" | "TypeScriptServer" => {
4994 servers_with_actions_requests.insert(
4995 new_server_name.clone(),
4996 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4997 move |_, _| {
4998 let name = new_server_name.clone();
4999 async move {
5000 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5001 lsp::CodeAction {
5002 title: format!("{name} code action"),
5003 ..lsp::CodeAction::default()
5004 },
5005 )]))
5006 }
5007 },
5008 ),
5009 );
5010 }
5011 "ESLintServer" => {
5012 servers_with_actions_requests.insert(
5013 new_server_name,
5014 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5015 |_, _| async move { Ok(None) },
5016 ),
5017 );
5018 }
5019 "NoActionsCapabilitiesServer" => {
5020 let _never_handled = new_server
5021 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5022 panic!(
5023 "Should not call for code actions server with no corresponding capabilities"
5024 )
5025 });
5026 }
5027 unexpected => panic!("Unexpected server name: {unexpected}"),
5028 }
5029 }
5030
5031 let code_actions_task = project.update(cx, |project, cx| {
5032 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5033 });
5034
5035 // cx.run_until_parked();
5036 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5037 |mut code_actions_request| async move {
5038 code_actions_request
5039 .next()
5040 .await
5041 .expect("All code actions requests should have been triggered")
5042 },
5043 ))
5044 .await;
5045 assert_eq!(
5046 vec!["TailwindServer code action", "TypeScriptServer code action"],
5047 code_actions_task
5048 .await
5049 .into_iter()
5050 .map(|code_action| code_action.lsp_action.title)
5051 .sorted()
5052 .collect::<Vec<_>>(),
5053 "Should receive code actions responses from all related servers with hover capabilities"
5054 );
5055}
5056
5057#[gpui::test]
5058async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5059 init_test(cx);
5060
5061 let fs = FakeFs::new(cx.executor());
5062 fs.insert_tree(
5063 "/dir",
5064 json!({
5065 "a.rs": "let a = 1;",
5066 "b.rs": "let b = 2;",
5067 "c.rs": "let c = 2;",
5068 }),
5069 )
5070 .await;
5071
5072 let project = Project::test(
5073 fs,
5074 [
5075 "/dir/a.rs".as_ref(),
5076 "/dir/b.rs".as_ref(),
5077 "/dir/c.rs".as_ref(),
5078 ],
5079 cx,
5080 )
5081 .await;
5082
5083 // check the initial state and get the worktrees
5084 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5085 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5086 assert_eq!(worktrees.len(), 3);
5087
5088 let worktree_a = worktrees[0].read(cx);
5089 let worktree_b = worktrees[1].read(cx);
5090 let worktree_c = worktrees[2].read(cx);
5091
5092 // check they start in the right order
5093 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5094 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5095 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5096
5097 (
5098 worktrees[0].clone(),
5099 worktrees[1].clone(),
5100 worktrees[2].clone(),
5101 )
5102 });
5103
5104 // move first worktree to after the second
5105 // [a, b, c] -> [b, a, c]
5106 project
5107 .update(cx, |project, cx| {
5108 let first = worktree_a.read(cx);
5109 let second = worktree_b.read(cx);
5110 project.move_worktree(first.id(), second.id(), cx)
5111 })
5112 .expect("moving first after second");
5113
5114 // check the state after moving
5115 project.update(cx, |project, cx| {
5116 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5117 assert_eq!(worktrees.len(), 3);
5118
5119 let first = worktrees[0].read(cx);
5120 let second = worktrees[1].read(cx);
5121 let third = worktrees[2].read(cx);
5122
5123 // check they are now in the right order
5124 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5125 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5126 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5127 });
5128
5129 // move the second worktree to before the first
5130 // [b, a, c] -> [a, b, c]
5131 project
5132 .update(cx, |project, cx| {
5133 let second = worktree_a.read(cx);
5134 let first = worktree_b.read(cx);
5135 project.move_worktree(first.id(), second.id(), cx)
5136 })
5137 .expect("moving second before first");
5138
5139 // check the state after moving
5140 project.update(cx, |project, cx| {
5141 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5142 assert_eq!(worktrees.len(), 3);
5143
5144 let first = worktrees[0].read(cx);
5145 let second = worktrees[1].read(cx);
5146 let third = worktrees[2].read(cx);
5147
5148 // check they are now in the right order
5149 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5150 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5151 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5152 });
5153
5154 // move the second worktree to after the third
5155 // [a, b, c] -> [a, c, b]
5156 project
5157 .update(cx, |project, cx| {
5158 let second = worktree_b.read(cx);
5159 let third = worktree_c.read(cx);
5160 project.move_worktree(second.id(), third.id(), cx)
5161 })
5162 .expect("moving second after third");
5163
5164 // check the state after moving
5165 project.update(cx, |project, cx| {
5166 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5167 assert_eq!(worktrees.len(), 3);
5168
5169 let first = worktrees[0].read(cx);
5170 let second = worktrees[1].read(cx);
5171 let third = worktrees[2].read(cx);
5172
5173 // check they are now in the right order
5174 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5175 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5176 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5177 });
5178
5179 // move the third worktree to before the second
5180 // [a, c, b] -> [a, b, c]
5181 project
5182 .update(cx, |project, cx| {
5183 let third = worktree_c.read(cx);
5184 let second = worktree_b.read(cx);
5185 project.move_worktree(third.id(), second.id(), cx)
5186 })
5187 .expect("moving third before second");
5188
5189 // check the state after moving
5190 project.update(cx, |project, cx| {
5191 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5192 assert_eq!(worktrees.len(), 3);
5193
5194 let first = worktrees[0].read(cx);
5195 let second = worktrees[1].read(cx);
5196 let third = worktrees[2].read(cx);
5197
5198 // check they are now in the right order
5199 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5200 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5201 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5202 });
5203
5204 // move the first worktree to after the third
5205 // [a, b, c] -> [b, c, a]
5206 project
5207 .update(cx, |project, cx| {
5208 let first = worktree_a.read(cx);
5209 let third = worktree_c.read(cx);
5210 project.move_worktree(first.id(), third.id(), cx)
5211 })
5212 .expect("moving first after third");
5213
5214 // check the state after moving
5215 project.update(cx, |project, cx| {
5216 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5217 assert_eq!(worktrees.len(), 3);
5218
5219 let first = worktrees[0].read(cx);
5220 let second = worktrees[1].read(cx);
5221 let third = worktrees[2].read(cx);
5222
5223 // check they are now in the right order
5224 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5225 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5226 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5227 });
5228
5229 // move the third worktree to before the first
5230 // [b, c, a] -> [a, b, c]
5231 project
5232 .update(cx, |project, cx| {
5233 let third = worktree_a.read(cx);
5234 let first = worktree_b.read(cx);
5235 project.move_worktree(third.id(), first.id(), cx)
5236 })
5237 .expect("moving third before first");
5238
5239 // check the state after moving
5240 project.update(cx, |project, cx| {
5241 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5242 assert_eq!(worktrees.len(), 3);
5243
5244 let first = worktrees[0].read(cx);
5245 let second = worktrees[1].read(cx);
5246 let third = worktrees[2].read(cx);
5247
5248 // check they are now in the right order
5249 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5250 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5251 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5252 });
5253}
5254
5255async fn search(
5256 project: &Model<Project>,
5257 query: SearchQuery,
5258 cx: &mut gpui::TestAppContext,
5259) -> Result<HashMap<String, Vec<Range<usize>>>> {
5260 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5261 let mut results = HashMap::default();
5262 while let Some(search_result) = search_rx.next().await {
5263 match search_result {
5264 SearchResult::Buffer { buffer, ranges } => {
5265 results.entry(buffer).or_insert(ranges);
5266 }
5267 SearchResult::LimitReached => {}
5268 }
5269 }
5270 Ok(results
5271 .into_iter()
5272 .map(|(buffer, ranges)| {
5273 buffer.update(cx, |buffer, cx| {
5274 let path = buffer
5275 .file()
5276 .unwrap()
5277 .full_path(cx)
5278 .to_string_lossy()
5279 .to_string();
5280 let ranges = ranges
5281 .into_iter()
5282 .map(|range| range.to_offset(buffer))
5283 .collect::<Vec<_>>();
5284 (path, ranges)
5285 })
5286 })
5287 .collect())
5288}
5289
/// Shared setup for every test in this module: installs a test settings
/// store and initializes the globals that `Project` depends on.
///
/// Call this at the start of a test before constructing a `Project`.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    // Only enable logging when RUST_LOG is set so test output stays quiet by
    // default; `try_init().ok()` tolerates repeated calls across tests.
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        // NOTE(review): the settings store is installed first — the `init`
        // calls below presumably register their settings against it, so the
        // order here likely matters.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
5303
5304fn json_lang() -> Arc<Language> {
5305 Arc::new(Language::new(
5306 LanguageConfig {
5307 name: "JSON".into(),
5308 matcher: LanguageMatcher {
5309 path_suffixes: vec!["json".to_string()],
5310 ..Default::default()
5311 },
5312 ..Default::default()
5313 },
5314 None,
5315 ))
5316}
5317
5318fn js_lang() -> Arc<Language> {
5319 Arc::new(Language::new(
5320 LanguageConfig {
5321 name: "JavaScript".into(),
5322 matcher: LanguageMatcher {
5323 path_suffixes: vec!["js".to_string()],
5324 ..Default::default()
5325 },
5326 ..Default::default()
5327 },
5328 None,
5329 ))
5330}
5331
5332fn rust_lang() -> Arc<Language> {
5333 Arc::new(Language::new(
5334 LanguageConfig {
5335 name: "Rust".into(),
5336 matcher: LanguageMatcher {
5337 path_suffixes: vec!["rs".to_string()],
5338 ..Default::default()
5339 },
5340 ..Default::default()
5341 },
5342 Some(tree_sitter_rust::LANGUAGE.into()),
5343 ))
5344}
5345
5346fn typescript_lang() -> Arc<Language> {
5347 Arc::new(Language::new(
5348 LanguageConfig {
5349 name: "TypeScript".into(),
5350 matcher: LanguageMatcher {
5351 path_suffixes: vec!["ts".to_string()],
5352 ..Default::default()
5353 },
5354 ..Default::default()
5355 },
5356 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5357 ))
5358}
5359
5360fn tsx_lang() -> Arc<Language> {
5361 Arc::new(Language::new(
5362 LanguageConfig {
5363 name: "tsx".into(),
5364 matcher: LanguageMatcher {
5365 path_suffixes: vec!["tsx".to_string()],
5366 ..Default::default()
5367 },
5368 ..Default::default()
5369 },
5370 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5371 ))
5372}
5373
5374fn get_all_tasks(
5375 project: &Model<Project>,
5376 worktree_id: Option<WorktreeId>,
5377 task_context: &TaskContext,
5378 cx: &mut AppContext,
5379) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5380 let resolved_tasks = project.update(cx, |project, cx| {
5381 project
5382 .task_inventory()
5383 .read(cx)
5384 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5385 });
5386
5387 cx.spawn(|_| async move {
5388 let (mut old, new) = resolved_tasks.await;
5389 old.extend(new);
5390 old
5391 })
5392}