1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
53#[cfg(not(windows))]
54#[gpui::test]
55async fn test_symlinks(cx: &mut gpui::TestAppContext) {
56 init_test(cx);
57 cx.executor().allow_parking();
58
59 let dir = temp_tree(json!({
60 "root": {
61 "apple": "",
62 "banana": {
63 "carrot": {
64 "date": "",
65 "endive": "",
66 }
67 },
68 "fennel": {
69 "grape": "",
70 }
71 }
72 }));
73
74 let root_link_path = dir.path().join("root_link");
75 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
76 os::unix::fs::symlink(
77 dir.path().join("root/fennel"),
78 dir.path().join("root/finnochio"),
79 )
80 .unwrap();
81
82 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
83
84 project.update(cx, |project, cx| {
85 let tree = project.worktrees(cx).next().unwrap().read(cx);
86 assert_eq!(tree.file_count(), 5);
87 assert_eq!(
88 tree.inode_for_path("fennel/grape"),
89 tree.inode_for_path("finnochio/grape")
90 );
91 });
92}
93
// Verifies per-directory `.zed` configuration handling: `settings.json` in a
// subdirectory overrides the worktree root's settings for files beneath it,
// `tasks.json` files from both directories are surfaced as task sources, and a
// static task source can be replaced at runtime with an in-memory one.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies the task source backed by the worktree root's tasks.json.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Resolve effective language settings for one file in each
            // directory.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            // `a/` inherits the root settings; `b/` has its own override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks.json files contribute a "cargo check" task; the args differ
    // per directory.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled so the inventory has usage history for it.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Replace the root tasks.json source with an in-memory tracked source fed
    // through a channel, carrying an updated task definition (extra arg + env).
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The replaced source now yields the updated args/env; the subdirectory
    // task is unchanged.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
298
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer is opened, buffers are configured from server
// capabilities, edits/saves/renames are routed only to the appropriate
// servers, and restarting servers reopens the relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers so
    // assertions below can tell which server configured a given buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // (The TOML edit below must produce no notification to either server.)
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic to the buffer so we can verify it gets cleared when
    // the buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not guaranteed, hence the
    // set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
681
// Verifies `workspace/didChangeWatchedFiles` support: registering file
// watchers causes ignored directories matching the globs to be loaded into
// the worktree, and subsequent FS mutations are forwarded to the server only
// when they match a registered watcher.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Snapshot the directory-scan counter so we can measure how much extra
    // scanning the watcher registration triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            // Watches inside the gitignored `target` dir —
                            // this should force `target/y` to be scanned.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            // Accumulate and sort by URI so the assertions below are
            // independent of event delivery order.
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone produces no change events, but it does cause
    // additional directory reads while loading the watched ignored subtree.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations. Three match the watched patterns
    // (src/c.rs created, src/b.rs removed, target/y/out/y2.rs created) and
    // two do not (src/d.txt, target/x/out/x2.rs).
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
875
876#[gpui::test]
877async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
878 init_test(cx);
879
880 let fs = FakeFs::new(cx.executor());
881 fs.insert_tree(
882 "/dir",
883 json!({
884 "a.rs": "let a = 1;",
885 "b.rs": "let b = 2;"
886 }),
887 )
888 .await;
889
890 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
891
892 let buffer_a = project
893 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
894 .await
895 .unwrap();
896 let buffer_b = project
897 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
898 .await
899 .unwrap();
900
901 project.update(cx, |project, cx| {
902 project
903 .update_diagnostics(
904 LanguageServerId(0),
905 lsp::PublishDiagnosticsParams {
906 uri: Url::from_file_path("/dir/a.rs").unwrap(),
907 version: None,
908 diagnostics: vec![lsp::Diagnostic {
909 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
910 severity: Some(lsp::DiagnosticSeverity::ERROR),
911 message: "error 1".to_string(),
912 ..Default::default()
913 }],
914 },
915 &[],
916 cx,
917 )
918 .unwrap();
919 project
920 .update_diagnostics(
921 LanguageServerId(0),
922 lsp::PublishDiagnosticsParams {
923 uri: Url::from_file_path("/dir/b.rs").unwrap(),
924 version: None,
925 diagnostics: vec![lsp::Diagnostic {
926 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
927 severity: Some(DiagnosticSeverity::WARNING),
928 message: "error 2".to_string(),
929 ..Default::default()
930 }],
931 },
932 &[],
933 cx,
934 )
935 .unwrap();
936 });
937
938 buffer_a.update(cx, |buffer, _| {
939 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
940 assert_eq!(
941 chunks
942 .iter()
943 .map(|(s, d)| (s.as_str(), *d))
944 .collect::<Vec<_>>(),
945 &[
946 ("let ", None),
947 ("a", Some(DiagnosticSeverity::ERROR)),
948 (" = 1;", None),
949 ]
950 );
951 });
952 buffer_b.update(cx, |buffer, _| {
953 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
954 assert_eq!(
955 chunks
956 .iter()
957 .map(|(s, d)| (s.as_str(), *d))
958 .collect::<Vec<_>>(),
959 &[
960 ("let ", None),
961 ("b", Some(DiagnosticSeverity::WARNING)),
962 (" = 2;", None),
963 ]
964 );
965 });
966}
967
968#[gpui::test]
969async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
970 init_test(cx);
971
972 let fs = FakeFs::new(cx.executor());
973 fs.insert_tree(
974 "/root",
975 json!({
976 "dir": {
977 ".git": {
978 "HEAD": "ref: refs/heads/main",
979 },
980 ".gitignore": "b.rs",
981 "a.rs": "let a = 1;",
982 "b.rs": "let b = 2;",
983 },
984 "other.rs": "let b = c;"
985 }),
986 )
987 .await;
988
989 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
990 let (worktree, _) = project
991 .update(cx, |project, cx| {
992 project.find_or_create_worktree("/root/dir", true, cx)
993 })
994 .await
995 .unwrap();
996 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
997
998 let (worktree, _) = project
999 .update(cx, |project, cx| {
1000 project.find_or_create_worktree("/root/other.rs", false, cx)
1001 })
1002 .await
1003 .unwrap();
1004 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1005
1006 let server_id = LanguageServerId(0);
1007 project.update(cx, |project, cx| {
1008 project
1009 .update_diagnostics(
1010 server_id,
1011 lsp::PublishDiagnosticsParams {
1012 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1013 version: None,
1014 diagnostics: vec![lsp::Diagnostic {
1015 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1016 severity: Some(lsp::DiagnosticSeverity::ERROR),
1017 message: "unused variable 'b'".to_string(),
1018 ..Default::default()
1019 }],
1020 },
1021 &[],
1022 cx,
1023 )
1024 .unwrap();
1025 project
1026 .update_diagnostics(
1027 server_id,
1028 lsp::PublishDiagnosticsParams {
1029 uri: Url::from_file_path("/root/other.rs").unwrap(),
1030 version: None,
1031 diagnostics: vec![lsp::Diagnostic {
1032 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1033 severity: Some(lsp::DiagnosticSeverity::ERROR),
1034 message: "unknown variable 'c'".to_string(),
1035 ..Default::default()
1036 }],
1037 },
1038 &[],
1039 cx,
1040 )
1041 .unwrap();
1042 });
1043
1044 let main_ignored_buffer = project
1045 .update(cx, |project, cx| {
1046 project.open_buffer((main_worktree_id, "b.rs"), cx)
1047 })
1048 .await
1049 .unwrap();
1050 main_ignored_buffer.update(cx, |buffer, _| {
1051 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1052 assert_eq!(
1053 chunks
1054 .iter()
1055 .map(|(s, d)| (s.as_str(), *d))
1056 .collect::<Vec<_>>(),
1057 &[
1058 ("let ", None),
1059 ("b", Some(DiagnosticSeverity::ERROR)),
1060 (" = 2;", None),
1061 ],
1062 "Gigitnored buffers should still get in-buffer diagnostics",
1063 );
1064 });
1065 let other_buffer = project
1066 .update(cx, |project, cx| {
1067 project.open_buffer((other_worktree_id, ""), cx)
1068 })
1069 .await
1070 .unwrap();
1071 other_buffer.update(cx, |buffer, _| {
1072 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1073 assert_eq!(
1074 chunks
1075 .iter()
1076 .map(|(s, d)| (s.as_str(), *d))
1077 .collect::<Vec<_>>(),
1078 &[
1079 ("let b = ", None),
1080 ("c", Some(DiagnosticSeverity::ERROR)),
1081 (";", None),
1082 ],
1083 "Buffers from hidden projects should still get in-buffer diagnostics"
1084 );
1085 });
1086
1087 project.update(cx, |project, cx| {
1088 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1089 assert_eq!(
1090 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1091 vec![(
1092 ProjectPath {
1093 worktree_id: main_worktree_id,
1094 path: Arc::from(Path::new("b.rs")),
1095 },
1096 server_id,
1097 DiagnosticSummary {
1098 error_count: 1,
1099 warning_count: 0,
1100 }
1101 )]
1102 );
1103 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1104 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1105 });
1106}
1107
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP work-done progress on the adapter's
    // `disk_based_diagnostics_progress_token` is surfaced as
    // DiskBasedDiagnosticsStarted/Finished project events, that published
    // diagnostics emit DiagnosticsUpdated and appear in the buffer, and that
    // re-publishing an identical (empty) diagnostic set emits no extra event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Progress started under the disk-based token (token is suffixed with
    // "/0" here) maps to the DiskBasedDiagnosticsStarted project event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    // Diagnostics may arrive for a file that is not open yet (a.rs); the
    // project still reports which worktree path was updated.
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Opening the buffer after the publish should still expose the diagnostic.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish must not re-emit DiagnosticsUpdated.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1236
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that when a language server is restarted while its disk-based
    // diagnostics progress is still open, the replacement server's progress
    // cycle is tracked cleanly: the old server's never-finished progress does
    // not leave the project stuck in a "diagnostics running" state.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1315
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published by a language server are cleared
    // from both the buffer and the project-level summary when that server is
    // restarted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm the diagnostic is visible
    // in the buffer and counted in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1395
1396#[gpui::test]
1397async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1398 init_test(cx);
1399
1400 let fs = FakeFs::new(cx.executor());
1401 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1402
1403 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1404 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1405
1406 language_registry.add(rust_lang());
1407 let mut fake_servers =
1408 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1409
1410 let buffer = project
1411 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1412 .await
1413 .unwrap();
1414
1415 // Before restarting the server, report diagnostics with an unknown buffer version.
1416 let fake_server = fake_servers.next().await.unwrap();
1417 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1418 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1419 version: Some(10000),
1420 diagnostics: Vec::new(),
1421 });
1422 cx.executor().run_until_parked();
1423
1424 project.update(cx, |project, cx| {
1425 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1426 });
1427 let mut fake_server = fake_servers.next().await.unwrap();
1428 let notification = fake_server
1429 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1430 .await
1431 .text_document;
1432 assert_eq!(notification.version, 0);
1433}
1434
1435#[gpui::test]
1436async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1437 init_test(cx);
1438
1439 let progress_token = "the-progress-token";
1440
1441 let fs = FakeFs::new(cx.executor());
1442 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1443
1444 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1445
1446 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1447 language_registry.add(rust_lang());
1448 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1449 "Rust",
1450 FakeLspAdapter {
1451 name: "the-language-server",
1452 disk_based_diagnostics_sources: vec!["disk".into()],
1453 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1454 ..Default::default()
1455 },
1456 );
1457
1458 let buffer = project
1459 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1460 .await
1461 .unwrap();
1462
1463 // Simulate diagnostics starting to update.
1464 let mut fake_server = fake_servers.next().await.unwrap();
1465 fake_server
1466 .start_progress_with(
1467 "another-token",
1468 lsp::WorkDoneProgressBegin {
1469 cancellable: Some(false),
1470 ..Default::default()
1471 },
1472 )
1473 .await;
1474 fake_server
1475 .start_progress_with(
1476 progress_token,
1477 lsp::WorkDoneProgressBegin {
1478 cancellable: Some(true),
1479 ..Default::default()
1480 },
1481 )
1482 .await;
1483 cx.executor().run_until_parked();
1484
1485 project.update(cx, |project, cx| {
1486 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1487 });
1488
1489 let cancel_notification = fake_server
1490 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1491 .await;
1492 assert_eq!(
1493 cancel_notification.token,
1494 NumberOrString::String(progress_token.into())
1495 );
1496}
1497
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that flipping `enable_language_server` per-language in user
    // settings stops exactly that language's server (it receives an Exit
    // notification) and re-enabling starts a fresh server that re-opens the
    // relevant buffers, without disturbing other languages' servers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts each language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1611
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an *older* buffer version
    // are translated through the edits made since that version: ranges move
    // with insertions, overlapping diagnostics highlight correctly, and
    // out-of-order publishes against a newer version are also handled.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // The two inserted newlines shift every diagnostic down by two rows.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries are ordered with the wider/lower-severity range first; the
        // error "wins" the highlight where the ranges overlap.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1891
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are rendered: they are
    // widened to cover an adjacent character so the highlight is visible
    // (forward in the middle of a line, backward at end of line).
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two empty-range diagnostics directly via the LSP store (no fake
    // server needed): one mid-line (0,10) and one at end of line (1,10).
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_buffer_diagnostics(
                    &buffer,
                    LanguageServerId(0),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1964
1965#[gpui::test]
1966async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1967 init_test(cx);
1968
1969 let fs = FakeFs::new(cx.executor());
1970 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1971 .await;
1972
1973 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1974
1975 project.update(cx, |project, cx| {
1976 project
1977 .update_diagnostic_entries(
1978 LanguageServerId(0),
1979 Path::new("/dir/a.rs").to_owned(),
1980 None,
1981 vec![DiagnosticEntry {
1982 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1983 diagnostic: Diagnostic {
1984 severity: DiagnosticSeverity::ERROR,
1985 is_primary: true,
1986 message: "syntax error a1".to_string(),
1987 ..Default::default()
1988 },
1989 }],
1990 cx,
1991 )
1992 .unwrap();
1993 project
1994 .update_diagnostic_entries(
1995 LanguageServerId(1),
1996 Path::new("/dir/a.rs").to_owned(),
1997 None,
1998 vec![DiagnosticEntry {
1999 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2000 diagnostic: Diagnostic {
2001 severity: DiagnosticSeverity::ERROR,
2002 is_primary: true,
2003 message: "syntax error b1".to_string(),
2004 ..Default::default()
2005 },
2006 }],
2007 cx,
2008 )
2009 .unwrap();
2010
2011 assert_eq!(
2012 project.diagnostic_summary(false, cx),
2013 DiagnosticSummary {
2014 error_count: 2,
2015 warning_count: 0,
2016 }
2017 );
2018 });
2019}
2020
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` can apply edits that a language server
    // computed against a *stale* document version: edits expressed in the old
    // coordinate space are translated through the buffer changes made since
    // that version before being applied.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the version the server "saw" when the buffer was opened.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The TextEdits below use coordinates from the *original* document
    // version; passing `Some(lsp_document_version)` tells the LSP store to
    // translate them through the edits made above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the interleaved manual
    // edits (the comments) while landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2174
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` minimizes a server-sent "rewrite most of
    // the file" edit set down to the small real change: the delete-and-
    // reinsert pattern collapses into two compact edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits collapse into just two minimal edits: the
        // import rewrite and the removal of the now-duplicated `use a::c;`.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2283
// Verifies that `edits_from_lsp` tolerates malformed server input: unordered
// edits, inverted ranges (start > end), and ranges that extend past the end
// of the file — all of which must be normalized and then minimized.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // An inverted range: start position after end position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // A range extending far beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same two minimal edits
        // as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2388
2389fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2390 buffer: &Buffer,
2391 range: Range<T>,
2392) -> Vec<(String, Option<DiagnosticSeverity>)> {
2393 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2394 for chunk in buffer.snapshot().chunks(range, true) {
2395 if chunks.last().map_or(false, |prev_chunk| {
2396 prev_chunk.1 == chunk.diagnostic_severity
2397 }) {
2398 chunks.last_mut().unwrap().0.push_str(chunk.text);
2399 } else {
2400 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2401 }
2402 }
2403 chunks
2404}
2405
// Exercises go-to-definition where the target lives in a file outside the
// project's visible worktrees: the target file should be added as an
// invisible worktree, and dropped again when the last handle to the
// definition goes away.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as a (single-file) worktree; `a.rs` is outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Respond with a location inside the out-of-project file `a.rs`.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, `a.rs` shows up as an invisible
        // (false) worktree alongside the visible `b.rs` one.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: lists each worktree's absolute path along with its visibility.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2500
// When completion items carry no explicit text edit, the replacement range
// must be inferred from the text surrounding the cursor: the word being
// typed, or — inside a string literal — the trailing path segment.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing a plain identifier; the cursor sits after "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item provides insert text but no edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal; cursor just before the quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", the trailing path segment in the string.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2592
// Completion insert text containing carriage returns ("\r" or "\r\n") must
// be normalized to plain "\n" line endings before being applied.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The fake server's insert text mixes bare "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" were converted to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2653
// Exercises the command-backed code-action flow: the action has no edits, so
// applying it must (1) resolve the action to obtain its command, (2) execute
// the command on the server, and (3) capture the edits the server sends back
// via a `workspace/applyEdit` request into the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server supports lazily resolving actions.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2786
2787#[gpui::test(iterations = 10)]
2788async fn test_save_file(cx: &mut gpui::TestAppContext) {
2789 init_test(cx);
2790
2791 let fs = FakeFs::new(cx.executor());
2792 fs.insert_tree(
2793 "/dir",
2794 json!({
2795 "file1": "the old contents",
2796 }),
2797 )
2798 .await;
2799
2800 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2801 let buffer = project
2802 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2803 .await
2804 .unwrap();
2805 buffer.update(cx, |buffer, cx| {
2806 assert_eq!(buffer.text(), "the old contents");
2807 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2808 });
2809
2810 project
2811 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2812 .await
2813 .unwrap();
2814
2815 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2816 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2817}
2818
2819#[gpui::test(iterations = 30)]
2820async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2821 init_test(cx);
2822
2823 let fs = FakeFs::new(cx.executor().clone());
2824 fs.insert_tree(
2825 "/dir",
2826 json!({
2827 "file1": "the original contents",
2828 }),
2829 )
2830 .await;
2831
2832 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2833 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2834 let buffer = project
2835 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2836 .await
2837 .unwrap();
2838
2839 // Simulate buffer diffs being slow, so that they don't complete before
2840 // the next file change occurs.
2841 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2842
2843 // Change the buffer's file on disk, and then wait for the file change
2844 // to be detected by the worktree, so that the buffer starts reloading.
2845 fs.save(
2846 "/dir/file1".as_ref(),
2847 &"the first contents".into(),
2848 Default::default(),
2849 )
2850 .await
2851 .unwrap();
2852 worktree.next_event(cx).await;
2853
2854 // Change the buffer's file again. Depending on the random seed, the
2855 // previous file change may still be in progress.
2856 fs.save(
2857 "/dir/file1".as_ref(),
2858 &"the second contents".into(),
2859 Default::default(),
2860 )
2861 .await
2862 .unwrap();
2863 worktree.next_event(cx).await;
2864
2865 cx.executor().run_until_parked();
2866 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2867 buffer.read_with(cx, |buffer, _| {
2868 assert_eq!(buffer.text(), on_disk_text);
2869 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2870 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2871 });
2872}
2873
2874#[gpui::test(iterations = 30)]
2875async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2876 init_test(cx);
2877
2878 let fs = FakeFs::new(cx.executor().clone());
2879 fs.insert_tree(
2880 "/dir",
2881 json!({
2882 "file1": "the original contents",
2883 }),
2884 )
2885 .await;
2886
2887 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2888 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2889 let buffer = project
2890 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2891 .await
2892 .unwrap();
2893
2894 // Simulate buffer diffs being slow, so that they don't complete before
2895 // the next file change occurs.
2896 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2897
2898 // Change the buffer's file on disk, and then wait for the file change
2899 // to be detected by the worktree, so that the buffer starts reloading.
2900 fs.save(
2901 "/dir/file1".as_ref(),
2902 &"the first contents".into(),
2903 Default::default(),
2904 )
2905 .await
2906 .unwrap();
2907 worktree.next_event(cx).await;
2908
2909 cx.executor()
2910 .spawn(cx.executor().simulate_random_delay())
2911 .await;
2912
2913 // Perform a noop edit, causing the buffer's version to increase.
2914 buffer.update(cx, |buffer, cx| {
2915 buffer.edit([(0..0, " ")], None, cx);
2916 buffer.undo(cx);
2917 });
2918
2919 cx.executor().run_until_parked();
2920 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2921 buffer.read_with(cx, |buffer, _| {
2922 let buffer_text = buffer.text();
2923 if buffer_text == on_disk_text {
2924 assert!(
2925 !buffer.is_dirty() && !buffer.has_conflict(),
2926 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2927 );
2928 }
2929 // If the file change occurred while the buffer was processing the first
2930 // change, the buffer will be in a conflicting state.
2931 else {
2932 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2933 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2934 }
2935 });
2936}
2937
2938#[gpui::test]
2939async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2940 init_test(cx);
2941
2942 let fs = FakeFs::new(cx.executor());
2943 fs.insert_tree(
2944 "/dir",
2945 json!({
2946 "file1": "the old contents",
2947 }),
2948 )
2949 .await;
2950
2951 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2952 let buffer = project
2953 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2954 .await
2955 .unwrap();
2956 buffer.update(cx, |buffer, cx| {
2957 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2958 });
2959
2960 project
2961 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2962 .await
2963 .unwrap();
2964
2965 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2966 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2967}
2968
// Saving an untitled buffer to a path should write it to disk, associate it
// with the new file (picking up the language from the extension), and make
// later opens of that path return the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Create an untitled buffer; with no file it defaults to Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // The buffer is now clean, renamed, and re-detected as Rust from ".rs".
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the saved path yields the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3020
// Uses a real filesystem to rename/delete files and directories underneath a
// worktree, verifying that (1) entry ids and open buffers track their files
// across renames, and (2) a remote replica of the worktree converges to the
// same state when fed the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay remotely later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames, including the rename of a parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while the deleted file keeps its old path but is marked deleted.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3168
// Renaming a directory must preserve the entry ids of the directory and its
// children, and must not disturb open buffers for files inside it.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // The directory and the file inside it keep their original entry ids,
    // and the open buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3220
3221#[gpui::test]
3222async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3223 init_test(cx);
3224
3225 let fs = FakeFs::new(cx.executor());
3226 fs.insert_tree(
3227 "/dir",
3228 json!({
3229 "a.txt": "a-contents",
3230 "b.txt": "b-contents",
3231 }),
3232 )
3233 .await;
3234
3235 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3236
3237 // Spawn multiple tasks to open paths, repeating some paths.
3238 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3239 (
3240 p.open_local_buffer("/dir/a.txt", cx),
3241 p.open_local_buffer("/dir/b.txt", cx),
3242 p.open_local_buffer("/dir/a.txt", cx),
3243 )
3244 });
3245
3246 let buffer_a_1 = buffer_a_1.await.unwrap();
3247 let buffer_a_2 = buffer_a_2.await.unwrap();
3248 let buffer_b = buffer_b.await.unwrap();
3249 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3250 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3251
3252 // There is only one buffer per path.
3253 let buffer_a_id = buffer_a_1.entity_id();
3254 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3255
3256 // Open the same path again while it is still open.
3257 drop(buffer_a_1);
3258 let buffer_a_3 = project
3259 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3260 .await
3261 .unwrap();
3262
3263 // There's still only one buffer per path.
3264 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3265}
3266
// Walks a buffer through edit → save → edit → revert transitions and file
// deletions, asserting both the dirty/conflict flags and the exact sequence
// of events emitted at each step.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every non-operation event the buffer emits.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3407
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer is reloaded (with anchors preserved via diffing), while a
    // dirty buffer keeps its edits and is flagged as conflicted.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three initial lines so we can
    // check that anchors survive the reload and move with the diffed edits.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors on "aaa" and "bbbbb" follow those lines to their new
        // rows; the third anchor sat on the deleted "c" line and ends up at
        // the end of the new text.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3488
3489#[gpui::test]
3490async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3491 init_test(cx);
3492
3493 let fs = FakeFs::new(cx.executor());
3494 fs.insert_tree(
3495 "/dir",
3496 json!({
3497 "file1": "a\nb\nc\n",
3498 "file2": "one\r\ntwo\r\nthree\r\n",
3499 }),
3500 )
3501 .await;
3502
3503 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3504 let buffer1 = project
3505 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3506 .await
3507 .unwrap();
3508 let buffer2 = project
3509 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3510 .await
3511 .unwrap();
3512
3513 buffer1.update(cx, |buffer, _| {
3514 assert_eq!(buffer.text(), "a\nb\nc\n");
3515 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3516 });
3517 buffer2.update(cx, |buffer, _| {
3518 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3519 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3520 });
3521
3522 // Change a file's line endings on disk from unix to windows. The buffer's
3523 // state updates correctly.
3524 fs.save(
3525 "/dir/file1".as_ref(),
3526 &"aaa\nb\nc\n".into(),
3527 LineEnding::Windows,
3528 )
3529 .await
3530 .unwrap();
3531 cx.executor().run_until_parked();
3532 buffer1.update(cx, |buffer, _| {
3533 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3534 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3535 });
3536
3537 // Save a file with windows line endings. The file is written correctly.
3538 buffer2.update(cx, |buffer, cx| {
3539 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3540 });
3541 project
3542 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3543 .await
3544 .unwrap();
3545 assert_eq!(
3546 fs.load("/dir/file2".as_ref()).await.unwrap(),
3547 "one\r\ntwo\r\nthree\r\nfour\r\n",
3548 );
3549}
3550
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics linked via LSP `related_information` are
    // collapsed into groups sharing a `group_id`, with exactly one primary
    // entry per group, and that `diagnostic_group` returns each group intact.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate a `textDocument/publishDiagnostics` message containing two
    // logical groups:
    //   - "error 1": a WARNING primary plus one HINT, cross-referencing each
    //     other through `related_information`;
    //   - "error 2": an ERROR primary plus two HINTs, likewise linked.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the message through the project's diagnostic pipeline and take an
    // immutable snapshot of the buffer to query against.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by range. Linked diagnostics share a `group_id`,
    // and only the original diagnostic of each group has `is_primary: true`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 0 yields the full "error 2" group: both hints and the
    // ERROR primary, ordered by range.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 1 yields the "error 1" group: the WARNING primary and
    // its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3792
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises `prepare_rename` and `perform_rename` end-to-end against a
    // fake language server, including a multi-file workspace edit.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // with `prepareProvider` enabled.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake server.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE"). The server reports
    // the renameable range, which the project converts to buffer offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform_rename to "THREE". The server returns a workspace edit
    // touching both files; the project applies it to (possibly newly-opened)
    // buffers.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction has one entry per edited buffer: the one we
    // opened ("one.rs") and the one opened on demand ("two.rs").
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3926
3927#[gpui::test]
3928async fn test_search(cx: &mut gpui::TestAppContext) {
3929 init_test(cx);
3930
3931 let fs = FakeFs::new(cx.executor());
3932 fs.insert_tree(
3933 "/dir",
3934 json!({
3935 "one.rs": "const ONE: usize = 1;",
3936 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3937 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3938 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3939 }),
3940 )
3941 .await;
3942 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3943 assert_eq!(
3944 search(
3945 &project,
3946 SearchQuery::text(
3947 "TWO",
3948 false,
3949 true,
3950 false,
3951 Default::default(),
3952 Default::default(),
3953 None
3954 )
3955 .unwrap(),
3956 cx
3957 )
3958 .await
3959 .unwrap(),
3960 HashMap::from_iter([
3961 ("dir/two.rs".to_string(), vec![6..9]),
3962 ("dir/three.rs".to_string(), vec![37..40])
3963 ])
3964 );
3965
3966 let buffer_4 = project
3967 .update(cx, |project, cx| {
3968 project.open_local_buffer("/dir/four.rs", cx)
3969 })
3970 .await
3971 .unwrap();
3972 buffer_4.update(cx, |buffer, cx| {
3973 let text = "two::TWO";
3974 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3975 });
3976
3977 assert_eq!(
3978 search(
3979 &project,
3980 SearchQuery::text(
3981 "TWO",
3982 false,
3983 true,
3984 false,
3985 Default::default(),
3986 Default::default(),
3987 None,
3988 )
3989 .unwrap(),
3990 cx
3991 )
3992 .await
3993 .unwrap(),
3994 HashMap::from_iter([
3995 ("dir/two.rs".to_string(), vec![6..9]),
3996 ("dir/three.rs".to_string(), vec![37..40]),
3997 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3998 ])
3999 );
4000}
4001
4002#[gpui::test]
4003async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4004 init_test(cx);
4005
4006 let search_query = "file";
4007
4008 let fs = FakeFs::new(cx.executor());
4009 fs.insert_tree(
4010 "/dir",
4011 json!({
4012 "one.rs": r#"// Rust file one"#,
4013 "one.ts": r#"// TypeScript file one"#,
4014 "two.rs": r#"// Rust file two"#,
4015 "two.ts": r#"// TypeScript file two"#,
4016 }),
4017 )
4018 .await;
4019 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4020
4021 assert!(
4022 search(
4023 &project,
4024 SearchQuery::text(
4025 search_query,
4026 false,
4027 true,
4028 false,
4029 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4030 Default::default(),
4031 None
4032 )
4033 .unwrap(),
4034 cx
4035 )
4036 .await
4037 .unwrap()
4038 .is_empty(),
4039 "If no inclusions match, no files should be returned"
4040 );
4041
4042 assert_eq!(
4043 search(
4044 &project,
4045 SearchQuery::text(
4046 search_query,
4047 false,
4048 true,
4049 false,
4050 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4051 Default::default(),
4052 None
4053 )
4054 .unwrap(),
4055 cx
4056 )
4057 .await
4058 .unwrap(),
4059 HashMap::from_iter([
4060 ("dir/one.rs".to_string(), vec![8..12]),
4061 ("dir/two.rs".to_string(), vec![8..12]),
4062 ]),
4063 "Rust only search should give only Rust files"
4064 );
4065
4066 assert_eq!(
4067 search(
4068 &project,
4069 SearchQuery::text(
4070 search_query,
4071 false,
4072 true,
4073 false,
4074
4075 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4076
4077 Default::default(),
4078 None,
4079 ).unwrap(),
4080 cx
4081 )
4082 .await
4083 .unwrap(),
4084 HashMap::from_iter([
4085 ("dir/one.ts".to_string(), vec![14..18]),
4086 ("dir/two.ts".to_string(), vec![14..18]),
4087 ]),
4088 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4089 );
4090
4091 assert_eq!(
4092 search(
4093 &project,
4094 SearchQuery::text(
4095 search_query,
4096 false,
4097 true,
4098 false,
4099
4100 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4101
4102 Default::default(),
4103 None,
4104 ).unwrap(),
4105 cx
4106 )
4107 .await
4108 .unwrap(),
4109 HashMap::from_iter([
4110 ("dir/two.ts".to_string(), vec![14..18]),
4111 ("dir/one.rs".to_string(), vec![8..12]),
4112 ("dir/one.ts".to_string(), vec![14..18]),
4113 ("dir/two.rs".to_string(), vec![8..12]),
4114 ]),
4115 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4116 );
4117}
4118
4119#[gpui::test]
4120async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4121 init_test(cx);
4122
4123 let search_query = "file";
4124
4125 let fs = FakeFs::new(cx.executor());
4126 fs.insert_tree(
4127 "/dir",
4128 json!({
4129 "one.rs": r#"// Rust file one"#,
4130 "one.ts": r#"// TypeScript file one"#,
4131 "two.rs": r#"// Rust file two"#,
4132 "two.ts": r#"// TypeScript file two"#,
4133 }),
4134 )
4135 .await;
4136 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4137
4138 assert_eq!(
4139 search(
4140 &project,
4141 SearchQuery::text(
4142 search_query,
4143 false,
4144 true,
4145 false,
4146 Default::default(),
4147 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4148 None,
4149 )
4150 .unwrap(),
4151 cx
4152 )
4153 .await
4154 .unwrap(),
4155 HashMap::from_iter([
4156 ("dir/one.rs".to_string(), vec![8..12]),
4157 ("dir/one.ts".to_string(), vec![14..18]),
4158 ("dir/two.rs".to_string(), vec![8..12]),
4159 ("dir/two.ts".to_string(), vec![14..18]),
4160 ]),
4161 "If no exclusions match, all files should be returned"
4162 );
4163
4164 assert_eq!(
4165 search(
4166 &project,
4167 SearchQuery::text(
4168 search_query,
4169 false,
4170 true,
4171 false,
4172 Default::default(),
4173 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4174 None,
4175 )
4176 .unwrap(),
4177 cx
4178 )
4179 .await
4180 .unwrap(),
4181 HashMap::from_iter([
4182 ("dir/one.ts".to_string(), vec![14..18]),
4183 ("dir/two.ts".to_string(), vec![14..18]),
4184 ]),
4185 "Rust exclusion search should give only TypeScript files"
4186 );
4187
4188 assert_eq!(
4189 search(
4190 &project,
4191 SearchQuery::text(
4192 search_query,
4193 false,
4194 true,
4195 false,
4196 Default::default(),
4197 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4198 None,
4199 ).unwrap(),
4200 cx
4201 )
4202 .await
4203 .unwrap(),
4204 HashMap::from_iter([
4205 ("dir/one.rs".to_string(), vec![8..12]),
4206 ("dir/two.rs".to_string(), vec![8..12]),
4207 ]),
4208 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4209 );
4210
4211 assert!(
4212 search(
4213 &project,
4214 SearchQuery::text(
4215 search_query,
4216 false,
4217 true,
4218 false,
4219 Default::default(),
4220
4221 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4222 None,
4223
4224 ).unwrap(),
4225 cx
4226 )
4227 .await
4228 .unwrap().is_empty(),
4229 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4230 );
4231}
4232
4233#[gpui::test]
4234async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4235 init_test(cx);
4236
4237 let search_query = "file";
4238
4239 let fs = FakeFs::new(cx.executor());
4240 fs.insert_tree(
4241 "/dir",
4242 json!({
4243 "one.rs": r#"// Rust file one"#,
4244 "one.ts": r#"// TypeScript file one"#,
4245 "two.rs": r#"// Rust file two"#,
4246 "two.ts": r#"// TypeScript file two"#,
4247 }),
4248 )
4249 .await;
4250 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4251
4252 assert!(
4253 search(
4254 &project,
4255 SearchQuery::text(
4256 search_query,
4257 false,
4258 true,
4259 false,
4260 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4261 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4262 None,
4263 )
4264 .unwrap(),
4265 cx
4266 )
4267 .await
4268 .unwrap()
4269 .is_empty(),
4270 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4271 );
4272
4273 assert!(
4274 search(
4275 &project,
4276 SearchQuery::text(
4277 search_query,
4278 false,
4279 true,
4280 false,
4281 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4282 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4283 None,
4284 ).unwrap(),
4285 cx
4286 )
4287 .await
4288 .unwrap()
4289 .is_empty(),
4290 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4291 );
4292
4293 assert!(
4294 search(
4295 &project,
4296 SearchQuery::text(
4297 search_query,
4298 false,
4299 true,
4300 false,
4301 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4302 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4303 None,
4304 )
4305 .unwrap(),
4306 cx
4307 )
4308 .await
4309 .unwrap()
4310 .is_empty(),
4311 "Non-matching inclusions and exclusions should not change that."
4312 );
4313
4314 assert_eq!(
4315 search(
4316 &project,
4317 SearchQuery::text(
4318 search_query,
4319 false,
4320 true,
4321 false,
4322 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4323 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4324 None,
4325 )
4326 .unwrap(),
4327 cx
4328 )
4329 .await
4330 .unwrap(),
4331 HashMap::from_iter([
4332 ("dir/one.ts".to_string(), vec![14..18]),
4333 ("dir/two.ts".to_string(), vec![14..18]),
4334 ]),
4335 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4336 );
4337}
4338
4339#[gpui::test]
4340async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4341 init_test(cx);
4342
4343 let fs = FakeFs::new(cx.executor());
4344 fs.insert_tree(
4345 "/worktree-a",
4346 json!({
4347 "haystack.rs": r#"// NEEDLE"#,
4348 "haystack.ts": r#"// NEEDLE"#,
4349 }),
4350 )
4351 .await;
4352 fs.insert_tree(
4353 "/worktree-b",
4354 json!({
4355 "haystack.rs": r#"// NEEDLE"#,
4356 "haystack.ts": r#"// NEEDLE"#,
4357 }),
4358 )
4359 .await;
4360
4361 let project = Project::test(
4362 fs.clone(),
4363 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4364 cx,
4365 )
4366 .await;
4367
4368 assert_eq!(
4369 search(
4370 &project,
4371 SearchQuery::text(
4372 "NEEDLE",
4373 false,
4374 true,
4375 false,
4376 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4377 Default::default(),
4378 None,
4379 )
4380 .unwrap(),
4381 cx
4382 )
4383 .await
4384 .unwrap(),
4385 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4386 "should only return results from included worktree"
4387 );
4388 assert_eq!(
4389 search(
4390 &project,
4391 SearchQuery::text(
4392 "NEEDLE",
4393 false,
4394 true,
4395 false,
4396 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4397 Default::default(),
4398 None,
4399 )
4400 .unwrap(),
4401 cx
4402 )
4403 .await
4404 .unwrap(),
4405 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4406 "should only return results from included worktree"
4407 );
4408
4409 assert_eq!(
4410 search(
4411 &project,
4412 SearchQuery::text(
4413 "NEEDLE",
4414 false,
4415 true,
4416 false,
4417 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4418 Default::default(),
4419 None,
4420 )
4421 .unwrap(),
4422 cx
4423 )
4424 .await
4425 .unwrap(),
4426 HashMap::from_iter([
4427 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4428 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4429 ]),
4430 "should return results from both worktrees"
4431 );
4432}
4433
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the `include_ignored` flag: gitignored directories are skipped
    // by default, searched when the flag is set, and inclusion/exclusion globs
    // still apply to ignored files.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // With `include_ignored: false`, files under "target" and "node_modules"
    // must not be searched.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created for each remaining query —
    // presumably to reset worktree scan state between searches; confirm.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion and exclusion globs are still honored inside ignored dirs.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4550
4551#[gpui::test]
4552async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
4553 init_test(cx);
4554
4555 let fs = FakeFs::new(cx.background_executor.clone());
4556 fs.insert_tree(
4557 "/dir",
4558 json!({
4559 ".git": {},
4560 ".gitignore": "**/target\n/node_modules\n",
4561 "aaa.txt": "key:value",
4562 "bbb": {
4563 "index.txt": "index_key:index_value"
4564 },
4565 "node_modules": {
4566 "10 eleven": "key",
4567 "1 two": "key"
4568 },
4569 }),
4570 )
4571 .await;
4572 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4573
4574 let mut search = project.update(cx, |project, cx| {
4575 project.search(
4576 SearchQuery::text(
4577 "key",
4578 false,
4579 false,
4580 true,
4581 Default::default(),
4582 Default::default(),
4583 None,
4584 )
4585 .unwrap(),
4586 cx,
4587 )
4588 });
4589
4590 fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
4591 match search_result.unwrap() {
4592 SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
4593 buffer.file().unwrap().path().to_string_lossy().to_string()
4594 }),
4595 _ => panic!("Expected buffer"),
4596 }
4597 }
4598
4599 assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
4600 assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
4601 assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
4602 assert_eq!(file_name(search.next().await, cx), "aaa.txt");
4603 assert!(search.next().await.is_none())
4604}
4605
4606#[gpui::test]
4607async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4608 init_test(cx);
4609
4610 let fs = FakeFs::new(cx.executor().clone());
4611 fs.insert_tree(
4612 "/one/two",
4613 json!({
4614 "three": {
4615 "a.txt": "",
4616 "four": {}
4617 },
4618 "c.rs": ""
4619 }),
4620 )
4621 .await;
4622
4623 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4624 project
4625 .update(cx, |project, cx| {
4626 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4627 project.create_entry((id, "b.."), true, cx)
4628 })
4629 .await
4630 .unwrap()
4631 .to_included()
4632 .unwrap();
4633
4634 // Can't create paths outside the project
4635 let result = project
4636 .update(cx, |project, cx| {
4637 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4638 project.create_entry((id, "../../boop"), true, cx)
4639 })
4640 .await;
4641 assert!(result.is_err());
4642
4643 // Can't create paths with '..'
4644 let result = project
4645 .update(cx, |project, cx| {
4646 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4647 project.create_entry((id, "four/../beep"), true, cx)
4648 })
4649 .await;
4650 assert!(result.is_err());
4651
4652 assert_eq!(
4653 fs.paths(true),
4654 vec![
4655 PathBuf::from("/"),
4656 PathBuf::from("/one"),
4657 PathBuf::from("/one/two"),
4658 PathBuf::from("/one/two/c.rs"),
4659 PathBuf::from("/one/two/three"),
4660 PathBuf::from("/one/two/three/a.txt"),
4661 PathBuf::from("/one/two/three/b.."),
4662 PathBuf::from("/one/two/three/four"),
4663 ]
4664 );
4665
4666 // And we cannot open buffers with '..'
4667 let result = project
4668 .update(cx, |project, cx| {
4669 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4670 project.open_buffer((id, "../c.rs"), cx)
4671 })
4672 .await;
4673 assert!(result.is_err())
4674}
4675
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Hover requests should fan out to every language server registered for
    // the buffer's language: servers advertising hover capabilities are
    // queried (with empty/`None` answers dropped from the combined result),
    // while servers without the capability must never be queried.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: two that answer hovers, one
    // that answers `None`, and one with no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // All four initialized servers are consumed from this first stream (see
    // the loop below); the later registrations are bound to `_a`/`_b`/`_c`
    // only to keep their adapters registered.
    let mut fake_tsx_language_servers = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This server explicitly advertises no hover support.
    let _c = language_registry.register_fake_lsp_adapter(
        "tsx",
        FakeLspAdapter {
            name: language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer triggers the startup of all registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each initialized server, keyed by name, so we
    // can later await the requests actually arriving.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            // These two respond with a real hover containing the server name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // This one is queried but has nothing to say.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This one must never be queried, since it lacks the capability;
            // the handler is installed only to detect a violation.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover first; the handler streams below only yield once the
    // servers actually receive their requests.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every hover-capable server has been queried.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that produced non-empty hovers contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4825
4826#[gpui::test]
4827async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4828 init_test(cx);
4829
4830 let fs = FakeFs::new(cx.executor());
4831 fs.insert_tree(
4832 "/dir",
4833 json!({
4834 "a.ts": "a",
4835 }),
4836 )
4837 .await;
4838
4839 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4840
4841 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4842 language_registry.add(typescript_lang());
4843 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4844 "TypeScript",
4845 FakeLspAdapter {
4846 capabilities: lsp::ServerCapabilities {
4847 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4848 ..lsp::ServerCapabilities::default()
4849 },
4850 ..FakeLspAdapter::default()
4851 },
4852 );
4853
4854 let buffer = project
4855 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4856 .await
4857 .unwrap();
4858 cx.executor().run_until_parked();
4859
4860 let fake_server = fake_language_servers
4861 .next()
4862 .await
4863 .expect("failed to get the language server");
4864
4865 let mut request_handled =
4866 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4867 Ok(Some(lsp::Hover {
4868 contents: lsp::HoverContents::Array(vec![
4869 lsp::MarkedString::String("".to_string()),
4870 lsp::MarkedString::String(" ".to_string()),
4871 lsp::MarkedString::String("\n\n\n".to_string()),
4872 ]),
4873 range: None,
4874 }))
4875 });
4876
4877 let hover_task = project.update(cx, |project, cx| {
4878 project.hover(&buffer, Point::new(0, 0), cx)
4879 });
4880 let () = request_handled
4881 .next()
4882 .await
4883 .expect("All hover requests should have been triggered");
4884 assert_eq!(
4885 Vec::<String>::new(),
4886 hover_task
4887 .await
4888 .into_iter()
4889 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4890 .sorted()
4891 .collect::<Vec<_>>(),
4892 "Empty hover parts should be ignored"
4893 );
4894}
4895
4896#[gpui::test]
4897async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4898 init_test(cx);
4899
4900 let fs = FakeFs::new(cx.executor());
4901 fs.insert_tree(
4902 "/dir",
4903 json!({
4904 "a.tsx": "a",
4905 }),
4906 )
4907 .await;
4908
4909 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4910
4911 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4912 language_registry.add(tsx_lang());
4913 let language_server_names = [
4914 "TypeScriptServer",
4915 "TailwindServer",
4916 "ESLintServer",
4917 "NoActionsCapabilitiesServer",
4918 ];
4919 let mut fake_tsx_language_servers = language_registry.register_fake_lsp_adapter(
4920 "tsx",
4921 FakeLspAdapter {
4922 name: language_server_names[0],
4923 capabilities: lsp::ServerCapabilities {
4924 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4925 ..lsp::ServerCapabilities::default()
4926 },
4927 ..FakeLspAdapter::default()
4928 },
4929 );
4930 let _a = language_registry.register_fake_lsp_adapter(
4931 "tsx",
4932 FakeLspAdapter {
4933 name: language_server_names[1],
4934 capabilities: lsp::ServerCapabilities {
4935 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4936 ..lsp::ServerCapabilities::default()
4937 },
4938 ..FakeLspAdapter::default()
4939 },
4940 );
4941 let _b = language_registry.register_fake_lsp_adapter(
4942 "tsx",
4943 FakeLspAdapter {
4944 name: language_server_names[2],
4945 capabilities: lsp::ServerCapabilities {
4946 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4947 ..lsp::ServerCapabilities::default()
4948 },
4949 ..FakeLspAdapter::default()
4950 },
4951 );
4952 let _c = language_registry.register_fake_lsp_adapter(
4953 "tsx",
4954 FakeLspAdapter {
4955 name: language_server_names[3],
4956 capabilities: lsp::ServerCapabilities {
4957 code_action_provider: None,
4958 ..lsp::ServerCapabilities::default()
4959 },
4960 ..FakeLspAdapter::default()
4961 },
4962 );
4963
4964 let buffer = project
4965 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4966 .await
4967 .unwrap();
4968 cx.executor().run_until_parked();
4969
4970 let mut servers_with_actions_requests = HashMap::default();
4971 for i in 0..language_server_names.len() {
4972 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4973 panic!(
4974 "Failed to get language server #{i} with name {}",
4975 &language_server_names[i]
4976 )
4977 });
4978 let new_server_name = new_server.server.name();
4979 assert!(
4980 !servers_with_actions_requests.contains_key(new_server_name),
4981 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4982 );
4983 let new_server_name = new_server_name.to_string();
4984 match new_server_name.as_str() {
4985 "TailwindServer" | "TypeScriptServer" => {
4986 servers_with_actions_requests.insert(
4987 new_server_name.clone(),
4988 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4989 move |_, _| {
4990 let name = new_server_name.clone();
4991 async move {
4992 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4993 lsp::CodeAction {
4994 title: format!("{name} code action"),
4995 ..lsp::CodeAction::default()
4996 },
4997 )]))
4998 }
4999 },
5000 ),
5001 );
5002 }
5003 "ESLintServer" => {
5004 servers_with_actions_requests.insert(
5005 new_server_name,
5006 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5007 |_, _| async move { Ok(None) },
5008 ),
5009 );
5010 }
5011 "NoActionsCapabilitiesServer" => {
5012 let _never_handled = new_server
5013 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5014 panic!(
5015 "Should not call for code actions server with no corresponding capabilities"
5016 )
5017 });
5018 }
5019 unexpected => panic!("Unexpected server name: {unexpected}"),
5020 }
5021 }
5022
5023 let code_actions_task = project.update(cx, |project, cx| {
5024 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5025 });
5026 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5027 |mut code_actions_request| async move {
5028 code_actions_request
5029 .next()
5030 .await
5031 .expect("All code actions requests should have been triggered")
5032 },
5033 ))
5034 .await;
5035 assert_eq!(
5036 vec!["TailwindServer code action", "TypeScriptServer code action"],
5037 code_actions_task
5038 .await
5039 .into_iter()
5040 .map(|code_action| code_action.lsp_action.title)
5041 .sorted()
5042 .collect::<Vec<_>>(),
5043 "Should receive code actions responses from all related servers with hover capabilities"
5044 );
5045}
5046
5047#[gpui::test]
5048async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5049 init_test(cx);
5050
5051 let fs = FakeFs::new(cx.executor());
5052 fs.insert_tree(
5053 "/dir",
5054 json!({
5055 "a.rs": "let a = 1;",
5056 "b.rs": "let b = 2;",
5057 "c.rs": "let c = 2;",
5058 }),
5059 )
5060 .await;
5061
5062 let project = Project::test(
5063 fs,
5064 [
5065 "/dir/a.rs".as_ref(),
5066 "/dir/b.rs".as_ref(),
5067 "/dir/c.rs".as_ref(),
5068 ],
5069 cx,
5070 )
5071 .await;
5072
5073 // check the initial state and get the worktrees
5074 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5075 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5076 assert_eq!(worktrees.len(), 3);
5077
5078 let worktree_a = worktrees[0].read(cx);
5079 let worktree_b = worktrees[1].read(cx);
5080 let worktree_c = worktrees[2].read(cx);
5081
5082 // check they start in the right order
5083 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5084 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5085 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5086
5087 (
5088 worktrees[0].clone(),
5089 worktrees[1].clone(),
5090 worktrees[2].clone(),
5091 )
5092 });
5093
5094 // move first worktree to after the second
5095 // [a, b, c] -> [b, a, c]
5096 project
5097 .update(cx, |project, cx| {
5098 let first = worktree_a.read(cx);
5099 let second = worktree_b.read(cx);
5100 project.move_worktree(first.id(), second.id(), cx)
5101 })
5102 .expect("moving first after second");
5103
5104 // check the state after moving
5105 project.update(cx, |project, cx| {
5106 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5107 assert_eq!(worktrees.len(), 3);
5108
5109 let first = worktrees[0].read(cx);
5110 let second = worktrees[1].read(cx);
5111 let third = worktrees[2].read(cx);
5112
5113 // check they are now in the right order
5114 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5115 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5116 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5117 });
5118
5119 // move the second worktree to before the first
5120 // [b, a, c] -> [a, b, c]
5121 project
5122 .update(cx, |project, cx| {
5123 let second = worktree_a.read(cx);
5124 let first = worktree_b.read(cx);
5125 project.move_worktree(first.id(), second.id(), cx)
5126 })
5127 .expect("moving second before first");
5128
5129 // check the state after moving
5130 project.update(cx, |project, cx| {
5131 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5132 assert_eq!(worktrees.len(), 3);
5133
5134 let first = worktrees[0].read(cx);
5135 let second = worktrees[1].read(cx);
5136 let third = worktrees[2].read(cx);
5137
5138 // check they are now in the right order
5139 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5140 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5141 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5142 });
5143
5144 // move the second worktree to after the third
5145 // [a, b, c] -> [a, c, b]
5146 project
5147 .update(cx, |project, cx| {
5148 let second = worktree_b.read(cx);
5149 let third = worktree_c.read(cx);
5150 project.move_worktree(second.id(), third.id(), cx)
5151 })
5152 .expect("moving second after third");
5153
5154 // check the state after moving
5155 project.update(cx, |project, cx| {
5156 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5157 assert_eq!(worktrees.len(), 3);
5158
5159 let first = worktrees[0].read(cx);
5160 let second = worktrees[1].read(cx);
5161 let third = worktrees[2].read(cx);
5162
5163 // check they are now in the right order
5164 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5165 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5166 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5167 });
5168
5169 // move the third worktree to before the second
5170 // [a, c, b] -> [a, b, c]
5171 project
5172 .update(cx, |project, cx| {
5173 let third = worktree_c.read(cx);
5174 let second = worktree_b.read(cx);
5175 project.move_worktree(third.id(), second.id(), cx)
5176 })
5177 .expect("moving third before second");
5178
5179 // check the state after moving
5180 project.update(cx, |project, cx| {
5181 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5182 assert_eq!(worktrees.len(), 3);
5183
5184 let first = worktrees[0].read(cx);
5185 let second = worktrees[1].read(cx);
5186 let third = worktrees[2].read(cx);
5187
5188 // check they are now in the right order
5189 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5190 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5191 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5192 });
5193
5194 // move the first worktree to after the third
5195 // [a, b, c] -> [b, c, a]
5196 project
5197 .update(cx, |project, cx| {
5198 let first = worktree_a.read(cx);
5199 let third = worktree_c.read(cx);
5200 project.move_worktree(first.id(), third.id(), cx)
5201 })
5202 .expect("moving first after third");
5203
5204 // check the state after moving
5205 project.update(cx, |project, cx| {
5206 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5207 assert_eq!(worktrees.len(), 3);
5208
5209 let first = worktrees[0].read(cx);
5210 let second = worktrees[1].read(cx);
5211 let third = worktrees[2].read(cx);
5212
5213 // check they are now in the right order
5214 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5215 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5216 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5217 });
5218
5219 // move the third worktree to before the first
5220 // [b, c, a] -> [a, b, c]
5221 project
5222 .update(cx, |project, cx| {
5223 let third = worktree_a.read(cx);
5224 let first = worktree_b.read(cx);
5225 project.move_worktree(third.id(), first.id(), cx)
5226 })
5227 .expect("moving third before first");
5228
5229 // check the state after moving
5230 project.update(cx, |project, cx| {
5231 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5232 assert_eq!(worktrees.len(), 3);
5233
5234 let first = worktrees[0].read(cx);
5235 let second = worktrees[1].read(cx);
5236 let third = worktrees[2].read(cx);
5237
5238 // check they are now in the right order
5239 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5240 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5241 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5242 });
5243}
5244
5245async fn search(
5246 project: &Model<Project>,
5247 query: SearchQuery,
5248 cx: &mut gpui::TestAppContext,
5249) -> Result<HashMap<String, Vec<Range<usize>>>> {
5250 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5251 let mut results = HashMap::default();
5252 while let Some(search_result) = search_rx.next().await {
5253 match search_result {
5254 SearchResult::Buffer { buffer, ranges } => {
5255 results.entry(buffer).or_insert(ranges);
5256 }
5257 SearchResult::LimitReached => {}
5258 }
5259 }
5260 Ok(results
5261 .into_iter()
5262 .map(|(buffer, ranges)| {
5263 buffer.update(cx, |buffer, cx| {
5264 let path = buffer
5265 .file()
5266 .unwrap()
5267 .full_path(cx)
5268 .to_string_lossy()
5269 .to_string();
5270 let ranges = ranges
5271 .into_iter()
5272 .map(|range| range.to_offset(buffer))
5273 .collect::<Vec<_>>();
5274 (path, ranges)
5275 })
5276 })
5277 .collect())
5278}
5279
5280pub fn init_test(cx: &mut gpui::TestAppContext) {
5281 if std::env::var("RUST_LOG").is_ok() {
5282 env_logger::try_init().ok();
5283 }
5284
5285 cx.update(|cx| {
5286 let settings_store = SettingsStore::test(cx);
5287 cx.set_global(settings_store);
5288 release_channel::init(SemanticVersion::default(), cx);
5289 language::init(cx);
5290 Project::init_settings(cx);
5291 });
5292}
5293
5294fn json_lang() -> Arc<Language> {
5295 Arc::new(Language::new(
5296 LanguageConfig {
5297 name: "JSON".into(),
5298 matcher: LanguageMatcher {
5299 path_suffixes: vec!["json".to_string()],
5300 ..Default::default()
5301 },
5302 ..Default::default()
5303 },
5304 None,
5305 ))
5306}
5307
5308fn js_lang() -> Arc<Language> {
5309 Arc::new(Language::new(
5310 LanguageConfig {
5311 name: Arc::from("JavaScript"),
5312 matcher: LanguageMatcher {
5313 path_suffixes: vec!["js".to_string()],
5314 ..Default::default()
5315 },
5316 ..Default::default()
5317 },
5318 None,
5319 ))
5320}
5321
5322fn rust_lang() -> Arc<Language> {
5323 Arc::new(Language::new(
5324 LanguageConfig {
5325 name: "Rust".into(),
5326 matcher: LanguageMatcher {
5327 path_suffixes: vec!["rs".to_string()],
5328 ..Default::default()
5329 },
5330 ..Default::default()
5331 },
5332 Some(tree_sitter_rust::language()),
5333 ))
5334}
5335
5336fn typescript_lang() -> Arc<Language> {
5337 Arc::new(Language::new(
5338 LanguageConfig {
5339 name: "TypeScript".into(),
5340 matcher: LanguageMatcher {
5341 path_suffixes: vec!["ts".to_string()],
5342 ..Default::default()
5343 },
5344 ..Default::default()
5345 },
5346 Some(tree_sitter_typescript::language_typescript()),
5347 ))
5348}
5349
5350fn tsx_lang() -> Arc<Language> {
5351 Arc::new(Language::new(
5352 LanguageConfig {
5353 name: "tsx".into(),
5354 matcher: LanguageMatcher {
5355 path_suffixes: vec!["tsx".to_string()],
5356 ..Default::default()
5357 },
5358 ..Default::default()
5359 },
5360 Some(tree_sitter_typescript::language_tsx()),
5361 ))
5362}
5363
5364fn get_all_tasks(
5365 project: &Model<Project>,
5366 worktree_id: Option<WorktreeId>,
5367 task_context: &TaskContext,
5368 cx: &mut AppContext,
5369) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5370 let resolved_tasks = project.update(cx, |project, cx| {
5371 project
5372 .task_inventory()
5373 .read(cx)
5374 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5375 });
5376
5377 cx.spawn(|_| async move {
5378 let (mut old, new) = resolved_tasks.await;
5379 old.extend(new);
5380 old
5381 })
5382}