1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree opened through a symlinked root follows
    // symlinks inside the tree and resolves them to the same inodes.
    init_test(cx);
    // Uses the real filesystem (temp_tree + RealFs), so parking is allowed.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Create a symlink to the root directory itself, plus a symlink inside
    // the tree ("root/finnochio" -> "root/fennel").
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, grape, plus grape seen again through the
        // "finnochio" symlink.
        assert_eq!(tree.file_count(), 5);
        // The symlinked path must resolve to the same underlying inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
93
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies that `.zed` directories provide directory-scoped settings and
    // tasks: a nested `b/.zed` overrides the worktree-root `.zed` for files
    // under `b/`, and task sources (worktree-local, recently scheduled, and
    // global file-based) are merged and ordered as asserted below.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    // Root `.zed` sets tab_size 8 and one task; nested `b/.zed` sets
    // tab_size 2 and contributes its own task.
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Allow the settings and tasks files discovered in the tree to be parsed
    // and applied before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies tasks contributed by the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per file: `a/a.rs` sees the root `.zed`
            // settings, `b/b.rs` sees the nested `b/.zed` override.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        // Project each task to a comparable tuple of (source, label, args, env).
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task sources are visible; the deeper `b/.zed` source
    // comes first in this initial ordering.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as most recently scheduled, and install a
    // global file-based tasks list; both should affect ordering below.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently scheduled task now sorts first; the global file-based
    // task (carrying its env) sorts after the worktree-local ones.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
297
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end exercise of language-server lifecycle management: servers
    // start lazily when a matching buffer opens, buffers are configured from
    // server capabilities, edits/saves/renames are routed only to matching
    // servers, and restarting servers re-opens the relevant documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake LSP adapters for Rust and JSON, each advertising its own
    // completion trigger characters and save-notification support.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename is reported as close-of-old + open-of-new to
    // the same (Rust) server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the buffer so we can verify below that it is
    // cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before new instances start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence the
    // set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
680
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies that `workspace/didChangeWatchedFiles` registrations are
    // honored: only mutations matching the registered glob patterns are
    // reported, and watching a path inside an ignored directory causes that
    // directory to be loaded into the worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // `target` is gitignored; only `target/y/**` will later be watched.
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for measuring how many extra directory scans the watch
    // registration triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact file, a glob over `src`, and a glob
    // inside the gitignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record (sorted by URI) every change notification the server receives.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone produces no change events, but does scan
    // the newly watched ignored directories.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
874
875#[gpui::test]
876async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
877 init_test(cx);
878
879 let fs = FakeFs::new(cx.executor());
880 fs.insert_tree(
881 "/dir",
882 json!({
883 "a.rs": "let a = 1;",
884 "b.rs": "let b = 2;"
885 }),
886 )
887 .await;
888
889 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
890
891 let buffer_a = project
892 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
893 .await
894 .unwrap();
895 let buffer_b = project
896 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
897 .await
898 .unwrap();
899
900 project.update(cx, |project, cx| {
901 project
902 .update_diagnostics(
903 LanguageServerId(0),
904 lsp::PublishDiagnosticsParams {
905 uri: Url::from_file_path("/dir/a.rs").unwrap(),
906 version: None,
907 diagnostics: vec![lsp::Diagnostic {
908 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
909 severity: Some(lsp::DiagnosticSeverity::ERROR),
910 message: "error 1".to_string(),
911 ..Default::default()
912 }],
913 },
914 &[],
915 cx,
916 )
917 .unwrap();
918 project
919 .update_diagnostics(
920 LanguageServerId(0),
921 lsp::PublishDiagnosticsParams {
922 uri: Url::from_file_path("/dir/b.rs").unwrap(),
923 version: None,
924 diagnostics: vec![lsp::Diagnostic {
925 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
926 severity: Some(DiagnosticSeverity::WARNING),
927 message: "error 2".to_string(),
928 ..Default::default()
929 }],
930 },
931 &[],
932 cx,
933 )
934 .unwrap();
935 });
936
937 buffer_a.update(cx, |buffer, _| {
938 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
939 assert_eq!(
940 chunks
941 .iter()
942 .map(|(s, d)| (s.as_str(), *d))
943 .collect::<Vec<_>>(),
944 &[
945 ("let ", None),
946 ("a", Some(DiagnosticSeverity::ERROR)),
947 (" = 1;", None),
948 ]
949 );
950 });
951 buffer_b.update(cx, |buffer, _| {
952 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
953 assert_eq!(
954 chunks
955 .iter()
956 .map(|(s, d)| (s.as_str(), *d))
957 .collect::<Vec<_>>(),
958 &[
959 ("let ", None),
960 ("b", Some(DiagnosticSeverity::WARNING)),
961 (" = 2;", None),
962 ]
963 );
964 });
965}
966
967#[gpui::test]
968async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
969 init_test(cx);
970
971 let fs = FakeFs::new(cx.executor());
972 fs.insert_tree(
973 "/root",
974 json!({
975 "dir": {
976 ".git": {
977 "HEAD": "ref: refs/heads/main",
978 },
979 ".gitignore": "b.rs",
980 "a.rs": "let a = 1;",
981 "b.rs": "let b = 2;",
982 },
983 "other.rs": "let b = c;"
984 }),
985 )
986 .await;
987
988 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
989 let (worktree, _) = project
990 .update(cx, |project, cx| {
991 project.find_or_create_worktree("/root/dir", true, cx)
992 })
993 .await
994 .unwrap();
995 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
996
997 let (worktree, _) = project
998 .update(cx, |project, cx| {
999 project.find_or_create_worktree("/root/other.rs", false, cx)
1000 })
1001 .await
1002 .unwrap();
1003 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1004
1005 let server_id = LanguageServerId(0);
1006 project.update(cx, |project, cx| {
1007 project
1008 .update_diagnostics(
1009 server_id,
1010 lsp::PublishDiagnosticsParams {
1011 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1012 version: None,
1013 diagnostics: vec![lsp::Diagnostic {
1014 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1015 severity: Some(lsp::DiagnosticSeverity::ERROR),
1016 message: "unused variable 'b'".to_string(),
1017 ..Default::default()
1018 }],
1019 },
1020 &[],
1021 cx,
1022 )
1023 .unwrap();
1024 project
1025 .update_diagnostics(
1026 server_id,
1027 lsp::PublishDiagnosticsParams {
1028 uri: Url::from_file_path("/root/other.rs").unwrap(),
1029 version: None,
1030 diagnostics: vec![lsp::Diagnostic {
1031 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1032 severity: Some(lsp::DiagnosticSeverity::ERROR),
1033 message: "unknown variable 'c'".to_string(),
1034 ..Default::default()
1035 }],
1036 },
1037 &[],
1038 cx,
1039 )
1040 .unwrap();
1041 });
1042
1043 let main_ignored_buffer = project
1044 .update(cx, |project, cx| {
1045 project.open_buffer((main_worktree_id, "b.rs"), cx)
1046 })
1047 .await
1048 .unwrap();
1049 main_ignored_buffer.update(cx, |buffer, _| {
1050 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1051 assert_eq!(
1052 chunks
1053 .iter()
1054 .map(|(s, d)| (s.as_str(), *d))
1055 .collect::<Vec<_>>(),
1056 &[
1057 ("let ", None),
1058 ("b", Some(DiagnosticSeverity::ERROR)),
1059 (" = 2;", None),
1060 ],
1061 "Gigitnored buffers should still get in-buffer diagnostics",
1062 );
1063 });
1064 let other_buffer = project
1065 .update(cx, |project, cx| {
1066 project.open_buffer((other_worktree_id, ""), cx)
1067 })
1068 .await
1069 .unwrap();
1070 other_buffer.update(cx, |buffer, _| {
1071 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1072 assert_eq!(
1073 chunks
1074 .iter()
1075 .map(|(s, d)| (s.as_str(), *d))
1076 .collect::<Vec<_>>(),
1077 &[
1078 ("let b = ", None),
1079 ("c", Some(DiagnosticSeverity::ERROR)),
1080 (";", None),
1081 ],
1082 "Buffers from hidden projects should still get in-buffer diagnostics"
1083 );
1084 });
1085
1086 project.update(cx, |project, cx| {
1087 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1088 assert_eq!(
1089 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1090 vec![(
1091 ProjectPath {
1092 worktree_id: main_worktree_id,
1093 path: Arc::from(Path::new("b.rs")),
1094 },
1095 server_id,
1096 DiagnosticSummary {
1097 error_count: 1,
1098 warning_count: 0,
1099 }
1100 )]
1101 );
1102 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1103 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1104 });
1105}
1106
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that progress reports published under the adapter's
    // `disk_based_diagnostics_progress_token` surface as
    // DiskBasedDiagnosticsStarted/Finished project events, and that publishing
    // the same empty diagnostics twice emits only one DiagnosticsUpdated event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name().into(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic emits a DiagnosticsUpdated event for the
    // affected file's project path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress under the disk-based token emits a "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Opening the buffer exposes the previously published diagnostic at the
    // reported range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second, identical publish must produce no further event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1239
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in flight doesn't leave the project stuck in a
    // "diagnostics running" state: the replacement server's progress
    // lifecycle fully supersedes the old server's unfinished one.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets the next id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name().into(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1324
1325#[gpui::test]
1326async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1327 init_test(cx);
1328
1329 let fs = FakeFs::new(cx.executor());
1330 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1331
1332 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1333
1334 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1335 language_registry.add(rust_lang());
1336 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1337
1338 let buffer = project
1339 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1340 .await
1341 .unwrap();
1342
1343 // Publish diagnostics
1344 let fake_server = fake_servers.next().await.unwrap();
1345 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1346 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1347 version: None,
1348 diagnostics: vec![lsp::Diagnostic {
1349 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1350 severity: Some(lsp::DiagnosticSeverity::ERROR),
1351 message: "the message".to_string(),
1352 ..Default::default()
1353 }],
1354 });
1355
1356 cx.executor().run_until_parked();
1357 buffer.update(cx, |buffer, _| {
1358 assert_eq!(
1359 buffer
1360 .snapshot()
1361 .diagnostics_in_range::<_, usize>(0..1, false)
1362 .map(|entry| entry.diagnostic.message.clone())
1363 .collect::<Vec<_>>(),
1364 ["the message".to_string()]
1365 );
1366 });
1367 project.update(cx, |project, cx| {
1368 assert_eq!(
1369 project.diagnostic_summary(false, cx),
1370 DiagnosticSummary {
1371 error_count: 1,
1372 warning_count: 0,
1373 }
1374 );
1375 });
1376
1377 project.update(cx, |project, cx| {
1378 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1379 });
1380
1381 // The diagnostics are cleared.
1382 cx.executor().run_until_parked();
1383 buffer.update(cx, |buffer, _| {
1384 assert_eq!(
1385 buffer
1386 .snapshot()
1387 .diagnostics_in_range::<_, usize>(0..1, false)
1388 .map(|entry| entry.diagnostic.message.clone())
1389 .collect::<Vec<_>>(),
1390 Vec::<String>::new(),
1391 );
1392 });
1393 project.update(cx, |project, cx| {
1394 assert_eq!(
1395 project.diagnostic_summary(false, cx),
1396 DiagnosticSummary {
1397 error_count: 0,
1398 warning_count: 0,
1399 }
1400 );
1401 });
1402}
1403
1404#[gpui::test]
1405async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1406 init_test(cx);
1407
1408 let fs = FakeFs::new(cx.executor());
1409 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1410
1411 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1412 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1413
1414 language_registry.add(rust_lang());
1415 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1416
1417 let buffer = project
1418 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1419 .await
1420 .unwrap();
1421
1422 // Before restarting the server, report diagnostics with an unknown buffer version.
1423 let fake_server = fake_servers.next().await.unwrap();
1424 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1425 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1426 version: Some(10000),
1427 diagnostics: Vec::new(),
1428 });
1429 cx.executor().run_until_parked();
1430
1431 project.update(cx, |project, cx| {
1432 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1433 });
1434 let mut fake_server = fake_servers.next().await.unwrap();
1435 let notification = fake_server
1436 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1437 .await
1438 .text_document;
1439 assert_eq!(notification.version, 0);
1440}
1441
1442#[gpui::test]
1443async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1444 init_test(cx);
1445
1446 let progress_token = "the-progress-token";
1447
1448 let fs = FakeFs::new(cx.executor());
1449 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1450
1451 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1452
1453 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1454 language_registry.add(rust_lang());
1455 let mut fake_servers = language_registry.register_fake_lsp(
1456 "Rust",
1457 FakeLspAdapter {
1458 name: "the-language-server",
1459 disk_based_diagnostics_sources: vec!["disk".into()],
1460 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1461 ..Default::default()
1462 },
1463 );
1464
1465 let buffer = project
1466 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1467 .await
1468 .unwrap();
1469
1470 // Simulate diagnostics starting to update.
1471 let mut fake_server = fake_servers.next().await.unwrap();
1472 fake_server
1473 .start_progress_with(
1474 "another-token",
1475 lsp::WorkDoneProgressBegin {
1476 cancellable: Some(false),
1477 ..Default::default()
1478 },
1479 )
1480 .await;
1481 fake_server
1482 .start_progress_with(
1483 progress_token,
1484 lsp::WorkDoneProgressBegin {
1485 cancellable: Some(true),
1486 ..Default::default()
1487 },
1488 )
1489 .await;
1490 cx.executor().run_until_parked();
1491
1492 project.update(cx, |project, cx| {
1493 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1494 });
1495
1496 let cancel_notification = fake_server
1497 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1498 .await;
1499 assert_eq!(
1500 cancel_notification.token,
1501 NumberOrString::String(progress_token.into())
1502 );
1503}
1504
1505#[gpui::test]
1506async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1507 init_test(cx);
1508
1509 let fs = FakeFs::new(cx.executor());
1510 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1511 .await;
1512
1513 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1514 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1515
1516 let mut fake_rust_servers = language_registry.register_fake_lsp(
1517 "Rust",
1518 FakeLspAdapter {
1519 name: "rust-lsp",
1520 ..Default::default()
1521 },
1522 );
1523 let mut fake_js_servers = language_registry.register_fake_lsp(
1524 "JavaScript",
1525 FakeLspAdapter {
1526 name: "js-lsp",
1527 ..Default::default()
1528 },
1529 );
1530 language_registry.add(rust_lang());
1531 language_registry.add(js_lang());
1532
1533 let _rs_buffer = project
1534 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1535 .await
1536 .unwrap();
1537 let _js_buffer = project
1538 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1539 .await
1540 .unwrap();
1541
1542 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1543 assert_eq!(
1544 fake_rust_server_1
1545 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1546 .await
1547 .text_document
1548 .uri
1549 .as_str(),
1550 "file:///dir/a.rs"
1551 );
1552
1553 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1554 assert_eq!(
1555 fake_js_server
1556 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1557 .await
1558 .text_document
1559 .uri
1560 .as_str(),
1561 "file:///dir/b.js"
1562 );
1563
1564 // Disable Rust language server, ensuring only that server gets stopped.
1565 cx.update(|cx| {
1566 SettingsStore::update_global(cx, |settings, cx| {
1567 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1568 settings.languages.insert(
1569 "Rust".into(),
1570 LanguageSettingsContent {
1571 enable_language_server: Some(false),
1572 ..Default::default()
1573 },
1574 );
1575 });
1576 })
1577 });
1578 fake_rust_server_1
1579 .receive_notification::<lsp::notification::Exit>()
1580 .await;
1581
1582 // Enable Rust and disable JavaScript language servers, ensuring that the
1583 // former gets started again and that the latter stops.
1584 cx.update(|cx| {
1585 SettingsStore::update_global(cx, |settings, cx| {
1586 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1587 settings.languages.insert(
1588 LanguageName::new("Rust"),
1589 LanguageSettingsContent {
1590 enable_language_server: Some(true),
1591 ..Default::default()
1592 },
1593 );
1594 settings.languages.insert(
1595 LanguageName::new("JavaScript"),
1596 LanguageSettingsContent {
1597 enable_language_server: Some(false),
1598 ..Default::default()
1599 },
1600 );
1601 });
1602 })
1603 });
1604 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1605 assert_eq!(
1606 fake_rust_server_2
1607 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1608 .await
1609 .text_document
1610 .uri
1611 .as_str(),
1612 "file:///dir/a.rs"
1613 );
1614 fake_js_server
1615 .receive_notification::<lsp::notification::Exit>()
1616 .await;
1617}
1618
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an older document version
    // are transformed through the buffer edits made since that version:
    // plain translation, overlapping ranges, and out-of-order ranges.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows 0-2 in the server's coordinates land on rows 2-4 after the
        // two inserted newlines; each diagnostic gets its own group id.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Highlighted chunks reflect the translated diagnostic ranges.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the highlighted chunks at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning is returned before the nested error; group ids
        // continue from the previous publish.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error severity wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics come back sorted by position, with ranges shifted by
        // the latest round of edits.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1898
1899#[gpui::test]
1900async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1901 init_test(cx);
1902
1903 let text = concat!(
1904 "let one = ;\n", //
1905 "let two = \n",
1906 "let three = 3;\n",
1907 );
1908
1909 let fs = FakeFs::new(cx.executor());
1910 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1911
1912 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1913 let buffer = project
1914 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1915 .await
1916 .unwrap();
1917
1918 project.update(cx, |project, cx| {
1919 project.lsp_store.update(cx, |lsp_store, cx| {
1920 lsp_store
1921 .update_buffer_diagnostics(
1922 &buffer,
1923 LanguageServerId(0),
1924 None,
1925 vec![
1926 DiagnosticEntry {
1927 range: Unclipped(PointUtf16::new(0, 10))
1928 ..Unclipped(PointUtf16::new(0, 10)),
1929 diagnostic: Diagnostic {
1930 severity: DiagnosticSeverity::ERROR,
1931 message: "syntax error 1".to_string(),
1932 ..Default::default()
1933 },
1934 },
1935 DiagnosticEntry {
1936 range: Unclipped(PointUtf16::new(1, 10))
1937 ..Unclipped(PointUtf16::new(1, 10)),
1938 diagnostic: Diagnostic {
1939 severity: DiagnosticSeverity::ERROR,
1940 message: "syntax error 2".to_string(),
1941 ..Default::default()
1942 },
1943 },
1944 ],
1945 cx,
1946 )
1947 .unwrap();
1948 })
1949 });
1950
1951 // An empty range is extended forward to include the following character.
1952 // At the end of a line, an empty range is extended backward to include
1953 // the preceding character.
1954 buffer.update(cx, |buffer, _| {
1955 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1956 assert_eq!(
1957 chunks
1958 .iter()
1959 .map(|(s, d)| (s.as_str(), *d))
1960 .collect::<Vec<_>>(),
1961 &[
1962 ("let one = ", None),
1963 (";", Some(DiagnosticSeverity::ERROR)),
1964 ("\nlet two =", None),
1965 (" ", Some(DiagnosticSeverity::ERROR)),
1966 ("\nlet three = 3;\n", None)
1967 ]
1968 );
1969 });
1970}
1971
1972#[gpui::test]
1973async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1974 init_test(cx);
1975
1976 let fs = FakeFs::new(cx.executor());
1977 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1978 .await;
1979
1980 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1981
1982 project.update(cx, |project, cx| {
1983 project
1984 .update_diagnostic_entries(
1985 LanguageServerId(0),
1986 Path::new("/dir/a.rs").to_owned(),
1987 None,
1988 vec![DiagnosticEntry {
1989 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1990 diagnostic: Diagnostic {
1991 severity: DiagnosticSeverity::ERROR,
1992 is_primary: true,
1993 message: "syntax error a1".to_string(),
1994 ..Default::default()
1995 },
1996 }],
1997 cx,
1998 )
1999 .unwrap();
2000 project
2001 .update_diagnostic_entries(
2002 LanguageServerId(1),
2003 Path::new("/dir/a.rs").to_owned(),
2004 None,
2005 vec![DiagnosticEntry {
2006 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2007 diagnostic: Diagnostic {
2008 severity: DiagnosticSeverity::ERROR,
2009 is_primary: true,
2010 message: "syntax error b1".to_string(),
2011 ..Default::default()
2012 },
2013 }],
2014 cx,
2015 )
2016 .unwrap();
2017
2018 assert_eq!(
2019 project.diagnostic_summary(false, cx),
2020 DiagnosticSummary {
2021 error_count: 2,
2022 warning_count: 0,
2023 }
2024 );
2025 });
2026}
2027
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` interprets server edits against the
    // document version the server reported (here: the version at open time)
    // and transforms them into the buffer's current coordinate space,
    // preserving the user's edits made in the meantime.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edits below use coordinates of the original (open-time) document
    // version, which is passed along so the store can transform them.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits yields the server's intended changes
    // merged with the user's concurrent edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2180
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` collapses a large, mostly-redundant diff
    // into the minimal set of edits (the pattern rust-analyzer produces for
    // its merge-imports code action).
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the minimal changes survive: the rewritten use statement and
        // the deletion of the now-redundant second use statement.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2289
// Verifies that `edits_from_lsp` tolerates malformed server edits: edits
// arriving out of document order, a range whose start comes after its end
// (which must be reordered), and an end position far past the end of the
// file (which must be clamped). The normalized result should match the
// well-formed case exactly.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position lies far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve anchor ranges to concrete points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the edits were normalized down to the
        // same two minimal edits as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2394
2395fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2396 buffer: &Buffer,
2397 range: Range<T>,
2398) -> Vec<(String, Option<DiagnosticSeverity>)> {
2399 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2400 for chunk in buffer.snapshot().chunks(range, true) {
2401 if chunks.last().map_or(false, |prev_chunk| {
2402 prev_chunk.1 == chunk.diagnostic_severity
2403 }) {
2404 chunks.last_mut().unwrap().0.push_str(chunk.text);
2405 } else {
2406 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2407 }
2408 }
2409 chunks
2410}
2411
// End-to-end go-to-definition. The server's response points at a file that
// is not part of any visible worktree, so the project creates an invisible
// worktree for it; once the last handle to the definition is dropped, that
// worktree is released again.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Respond with a location inside `a.rs`, which lies outside the project's
    // only (single-file) worktree.
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // An invisible worktree was created for the definition target.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree again.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Returns each worktree's absolute path paired with its visibility.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2505
// When the server returns completion items without explicit text edits, the
// replaced range must be inferred from the text around the cursor: the
// identifier characters before the cursor for ordinary completions, and the
// partial token inside the quotes when completing within a string literal.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a partial identifier ("fqn").
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over the label, and the replaced range covers the
    // three-character identifier "fqn" before the cursor.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal — the replaced range covers
    // the partial token "cmp" inside the quotes.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2597
2598#[gpui::test]
2599async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2600 init_test(cx);
2601
2602 let fs = FakeFs::new(cx.executor());
2603 fs.insert_tree(
2604 "/dir",
2605 json!({
2606 "a.ts": "",
2607 }),
2608 )
2609 .await;
2610
2611 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2612
2613 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2614 language_registry.add(typescript_lang());
2615 let mut fake_language_servers = language_registry.register_fake_lsp(
2616 "TypeScript",
2617 FakeLspAdapter {
2618 capabilities: lsp::ServerCapabilities {
2619 completion_provider: Some(lsp::CompletionOptions {
2620 trigger_characters: Some(vec![":".to_string()]),
2621 ..Default::default()
2622 }),
2623 ..Default::default()
2624 },
2625 ..Default::default()
2626 },
2627 );
2628
2629 let buffer = project
2630 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2631 .await
2632 .unwrap();
2633
2634 let fake_server = fake_language_servers.next().await.unwrap();
2635
2636 let text = "let a = b.fqn";
2637 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2638 let completions = project.update(cx, |project, cx| {
2639 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
2640 });
2641
2642 fake_server
2643 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2644 Ok(Some(lsp::CompletionResponse::Array(vec![
2645 lsp::CompletionItem {
2646 label: "fullyQualifiedName?".into(),
2647 insert_text: Some("fully\rQualified\r\nName".into()),
2648 ..Default::default()
2649 },
2650 ])))
2651 })
2652 .next()
2653 .await;
2654 let completions = completions.await.unwrap();
2655 assert_eq!(completions.len(), 1);
2656 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2657}
2658
// Exercises the command-based code-action flow: the server returns an action
// carrying `data` but no edits, the client resolves it to obtain a command,
// executes that command, and the actual edits arrive via a
// `workspace/applyEdit` server-to-client request — which must land in the
// returned project transaction and be undoable.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2791
2792#[gpui::test(iterations = 10)]
2793async fn test_save_file(cx: &mut gpui::TestAppContext) {
2794 init_test(cx);
2795
2796 let fs = FakeFs::new(cx.executor());
2797 fs.insert_tree(
2798 "/dir",
2799 json!({
2800 "file1": "the old contents",
2801 }),
2802 )
2803 .await;
2804
2805 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2806 let buffer = project
2807 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2808 .await
2809 .unwrap();
2810 buffer.update(cx, |buffer, cx| {
2811 assert_eq!(buffer.text(), "the old contents");
2812 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2813 });
2814
2815 project
2816 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2817 .await
2818 .unwrap();
2819
2820 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2821 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2822}
2823
2824#[gpui::test(iterations = 30)]
2825async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2826 init_test(cx);
2827
2828 let fs = FakeFs::new(cx.executor().clone());
2829 fs.insert_tree(
2830 "/dir",
2831 json!({
2832 "file1": "the original contents",
2833 }),
2834 )
2835 .await;
2836
2837 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2838 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2839 let buffer = project
2840 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2841 .await
2842 .unwrap();
2843
2844 // Simulate buffer diffs being slow, so that they don't complete before
2845 // the next file change occurs.
2846 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2847
2848 // Change the buffer's file on disk, and then wait for the file change
2849 // to be detected by the worktree, so that the buffer starts reloading.
2850 fs.save(
2851 "/dir/file1".as_ref(),
2852 &"the first contents".into(),
2853 Default::default(),
2854 )
2855 .await
2856 .unwrap();
2857 worktree.next_event(cx).await;
2858
2859 // Change the buffer's file again. Depending on the random seed, the
2860 // previous file change may still be in progress.
2861 fs.save(
2862 "/dir/file1".as_ref(),
2863 &"the second contents".into(),
2864 Default::default(),
2865 )
2866 .await
2867 .unwrap();
2868 worktree.next_event(cx).await;
2869
2870 cx.executor().run_until_parked();
2871 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2872 buffer.read_with(cx, |buffer, _| {
2873 assert_eq!(buffer.text(), on_disk_text);
2874 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2875 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2876 });
2877}
2878
2879#[gpui::test(iterations = 30)]
2880async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2881 init_test(cx);
2882
2883 let fs = FakeFs::new(cx.executor().clone());
2884 fs.insert_tree(
2885 "/dir",
2886 json!({
2887 "file1": "the original contents",
2888 }),
2889 )
2890 .await;
2891
2892 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2893 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2894 let buffer = project
2895 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2896 .await
2897 .unwrap();
2898
2899 // Simulate buffer diffs being slow, so that they don't complete before
2900 // the next file change occurs.
2901 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2902
2903 // Change the buffer's file on disk, and then wait for the file change
2904 // to be detected by the worktree, so that the buffer starts reloading.
2905 fs.save(
2906 "/dir/file1".as_ref(),
2907 &"the first contents".into(),
2908 Default::default(),
2909 )
2910 .await
2911 .unwrap();
2912 worktree.next_event(cx).await;
2913
2914 cx.executor()
2915 .spawn(cx.executor().simulate_random_delay())
2916 .await;
2917
2918 // Perform a noop edit, causing the buffer's version to increase.
2919 buffer.update(cx, |buffer, cx| {
2920 buffer.edit([(0..0, " ")], None, cx);
2921 buffer.undo(cx);
2922 });
2923
2924 cx.executor().run_until_parked();
2925 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2926 buffer.read_with(cx, |buffer, _| {
2927 let buffer_text = buffer.text();
2928 if buffer_text == on_disk_text {
2929 assert!(
2930 !buffer.is_dirty() && !buffer.has_conflict(),
2931 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2932 );
2933 }
2934 // If the file change occurred while the buffer was processing the first
2935 // change, the buffer will be in a conflicting state.
2936 else {
2937 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2938 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2939 }
2940 });
2941}
2942
2943#[gpui::test]
2944async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2945 init_test(cx);
2946
2947 let fs = FakeFs::new(cx.executor());
2948 fs.insert_tree(
2949 "/dir",
2950 json!({
2951 "file1": "the old contents",
2952 }),
2953 )
2954 .await;
2955
2956 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2957 let buffer = project
2958 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2959 .await
2960 .unwrap();
2961 buffer.update(cx, |buffer, cx| {
2962 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2963 });
2964
2965 project
2966 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2967 .await
2968 .unwrap();
2969
2970 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2971 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2972}
2973
2974#[gpui::test]
2975async fn test_save_as(cx: &mut gpui::TestAppContext) {
2976 init_test(cx);
2977
2978 let fs = FakeFs::new(cx.executor());
2979 fs.insert_tree("/dir", json!({})).await;
2980
2981 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2982
2983 let languages = project.update(cx, |project, _| project.languages().clone());
2984 languages.add(rust_lang());
2985
2986 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
2987 buffer.update(cx, |buffer, cx| {
2988 buffer.edit([(0..0, "abc")], None, cx);
2989 assert!(buffer.is_dirty());
2990 assert!(!buffer.has_conflict());
2991 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
2992 });
2993 project
2994 .update(cx, |project, cx| {
2995 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
2996 let path = ProjectPath {
2997 worktree_id,
2998 path: Arc::from(Path::new("file1.rs")),
2999 };
3000 project.save_buffer_as(buffer.clone(), path, cx)
3001 })
3002 .await
3003 .unwrap();
3004 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3005
3006 cx.executor().run_until_parked();
3007 buffer.update(cx, |buffer, cx| {
3008 assert_eq!(
3009 buffer.file().unwrap().full_path(cx),
3010 Path::new("dir/file1.rs")
3011 );
3012 assert!(!buffer.is_dirty());
3013 assert!(!buffer.has_conflict());
3014 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3015 });
3016
3017 let opened_buffer = project
3018 .update(cx, |project, cx| {
3019 project.open_local_buffer("/dir/file1.rs", cx)
3020 })
3021 .await
3022 .unwrap();
3023 assert_eq!(opened_buffer, buffer);
3024}
3025
// After files and directories are renamed or deleted on the real filesystem,
// worktree entry ids must be preserved for renamed entries, open buffers
// must follow their files to the new paths (or be flagged deleted), and a
// remote replica fed the observed update stream must converge to the same
// set of paths as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Opens a buffer for the given worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable entry id for a path, panicking if it is missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree broadcasts so they can be
    // replayed into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Renamed entries keep their original ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but is flagged as deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3173
3174#[gpui::test(iterations = 10)]
3175async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3176 init_test(cx);
3177
3178 let fs = FakeFs::new(cx.executor());
3179 fs.insert_tree(
3180 "/dir",
3181 json!({
3182 "a": {
3183 "file1": "",
3184 }
3185 }),
3186 )
3187 .await;
3188
3189 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3190 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3191 let tree_id = tree.update(cx, |tree, _| tree.id());
3192
3193 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3194 project.update(cx, |project, cx| {
3195 let tree = project.worktrees(cx).next().unwrap();
3196 tree.read(cx)
3197 .entry_for_path(path)
3198 .unwrap_or_else(|| panic!("no entry for path {}", path))
3199 .id
3200 })
3201 };
3202
3203 let dir_id = id_for_path("a", cx);
3204 let file_id = id_for_path("a/file1", cx);
3205 let buffer = project
3206 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3207 .await
3208 .unwrap();
3209 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3210
3211 project
3212 .update(cx, |project, cx| {
3213 project.rename_entry(dir_id, Path::new("b"), cx)
3214 })
3215 .unwrap()
3216 .await
3217 .to_included()
3218 .unwrap();
3219 cx.executor().run_until_parked();
3220
3221 assert_eq!(id_for_path("b", cx), dir_id);
3222 assert_eq!(id_for_path("b/file1", cx), file_id);
3223 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3224}
3225
3226#[gpui::test]
3227async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3228 init_test(cx);
3229
3230 let fs = FakeFs::new(cx.executor());
3231 fs.insert_tree(
3232 "/dir",
3233 json!({
3234 "a.txt": "a-contents",
3235 "b.txt": "b-contents",
3236 }),
3237 )
3238 .await;
3239
3240 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3241
3242 // Spawn multiple tasks to open paths, repeating some paths.
3243 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3244 (
3245 p.open_local_buffer("/dir/a.txt", cx),
3246 p.open_local_buffer("/dir/b.txt", cx),
3247 p.open_local_buffer("/dir/a.txt", cx),
3248 )
3249 });
3250
3251 let buffer_a_1 = buffer_a_1.await.unwrap();
3252 let buffer_a_2 = buffer_a_2.await.unwrap();
3253 let buffer_b = buffer_b.await.unwrap();
3254 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3255 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3256
3257 // There is only one buffer per path.
3258 let buffer_a_id = buffer_a_1.entity_id();
3259 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3260
3261 // Open the same path again while it is still open.
3262 drop(buffer_a_1);
3263 let buffer_a_3 = project
3264 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3265 .await
3266 .unwrap();
3267
3268 // There's still only one buffer per path.
3269 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3270}
3271
// Exercises dirty-state tracking end to end: edits dirty a buffer and emit
// Edited/DirtyChanged, saving cleans it and emits Saved, restoring the
// previously-saved contents clears the dirty flag again, and deleting the
// underlying file dirties a clean buffer — but emits no extra DirtyChanged
// when the buffer was already dirty.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all buffer events except Operation, which is noise here.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer before deleting its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3418
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers both reactions to the file changing on disk underneath an open
    // buffer: a clean buffer is reloaded in place (with anchors re-resolved
    // via a diff against the new contents), while a dirty buffer keeps its
    // edits and is flagged as conflicted instead.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor just after the first character of each of the three
    // original lines, so we can observe how anchors move across a reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors on the surviving lines track their new positions
        // ("aaa" is now row 1, "bbbbb" row 3); the anchor on the deleted
        // "c" line resolves to the end of "bbbbb" (row 3, column 5).
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3499
3500#[gpui::test]
3501async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3502 init_test(cx);
3503
3504 let fs = FakeFs::new(cx.executor());
3505 fs.insert_tree(
3506 "/dir",
3507 json!({
3508 "file1": "a\nb\nc\n",
3509 "file2": "one\r\ntwo\r\nthree\r\n",
3510 }),
3511 )
3512 .await;
3513
3514 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3515 let buffer1 = project
3516 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3517 .await
3518 .unwrap();
3519 let buffer2 = project
3520 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3521 .await
3522 .unwrap();
3523
3524 buffer1.update(cx, |buffer, _| {
3525 assert_eq!(buffer.text(), "a\nb\nc\n");
3526 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3527 });
3528 buffer2.update(cx, |buffer, _| {
3529 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3530 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3531 });
3532
3533 // Change a file's line endings on disk from unix to windows. The buffer's
3534 // state updates correctly.
3535 fs.save(
3536 "/dir/file1".as_ref(),
3537 &"aaa\nb\nc\n".into(),
3538 LineEnding::Windows,
3539 )
3540 .await
3541 .unwrap();
3542 cx.executor().run_until_parked();
3543 buffer1.update(cx, |buffer, _| {
3544 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3545 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3546 });
3547
3548 // Save a file with windows line endings. The file is written correctly.
3549 buffer2.update(cx, |buffer, cx| {
3550 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3551 });
3552 project
3553 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3554 .await
3555 .unwrap();
3556 assert_eq!(
3557 fs.load("/dir/file2".as_ref()).await.unwrap(),
3558 "one\r\ntwo\r\nthree\r\nfour\r\n",
3559 );
3560}
3561
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics published with `related_information` are
    // grouped: a primary diagnostic and the hint diagnostics that point back
    // at it share a `group_id`; `diagnostics_in_range` yields entries from
    // all groups in position order, and `diagnostic_group` yields exactly
    // one group's members.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Publish two diagnostic groups:
    // - "error 1" (WARNING) with one related hint, and that hint also
    //   published as a standalone HINT diagnostic referring back to it.
    // - "error 2" (ERROR) with two related hints, each likewise published
    //   as a standalone HINT diagnostic referring back to it.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The full set of entries, ordered by position. The hint entries carry
    // the `group_id` of the primary diagnostic they relate to, and
    // `is_primary` distinguishes the primary from its hints.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3803
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the LSP rename flow end-to-end against a fake server:
    // `prepare_rename` resolves the symbol range under the cursor, and
    // `perform_rename` applies a multi-file `WorkspaceEdit` as a transaction
    // covering every affected buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise prepare support so `prepare_rename` issues a
                // PrepareRenameRequest instead of falling back.
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Offset 7 is inside "ONE" in `const ONE: usize = 1;`; the server
    // answers with the full symbol range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Rename "ONE" to "THREE". The fake server returns a WorkspaceEdit that
    // touches both the defining file and the file referencing it.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers, each with the edits
    // applied ("two.rs" was opened implicitly to apply its edits).
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3937
3938#[gpui::test]
3939async fn test_search(cx: &mut gpui::TestAppContext) {
3940 init_test(cx);
3941
3942 let fs = FakeFs::new(cx.executor());
3943 fs.insert_tree(
3944 "/dir",
3945 json!({
3946 "one.rs": "const ONE: usize = 1;",
3947 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3948 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3949 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3950 }),
3951 )
3952 .await;
3953 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3954 assert_eq!(
3955 search(
3956 &project,
3957 SearchQuery::text(
3958 "TWO",
3959 false,
3960 true,
3961 false,
3962 Default::default(),
3963 Default::default(),
3964 None
3965 )
3966 .unwrap(),
3967 cx
3968 )
3969 .await
3970 .unwrap(),
3971 HashMap::from_iter([
3972 ("dir/two.rs".to_string(), vec![6..9]),
3973 ("dir/three.rs".to_string(), vec![37..40])
3974 ])
3975 );
3976
3977 let buffer_4 = project
3978 .update(cx, |project, cx| {
3979 project.open_local_buffer("/dir/four.rs", cx)
3980 })
3981 .await
3982 .unwrap();
3983 buffer_4.update(cx, |buffer, cx| {
3984 let text = "two::TWO";
3985 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3986 });
3987
3988 assert_eq!(
3989 search(
3990 &project,
3991 SearchQuery::text(
3992 "TWO",
3993 false,
3994 true,
3995 false,
3996 Default::default(),
3997 Default::default(),
3998 None,
3999 )
4000 .unwrap(),
4001 cx
4002 )
4003 .await
4004 .unwrap(),
4005 HashMap::from_iter([
4006 ("dir/two.rs".to_string(), vec![6..9]),
4007 ("dir/three.rs".to_string(), vec![37..40]),
4008 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4009 ])
4010 );
4011}
4012
4013#[gpui::test]
4014async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4015 init_test(cx);
4016
4017 let search_query = "file";
4018
4019 let fs = FakeFs::new(cx.executor());
4020 fs.insert_tree(
4021 "/dir",
4022 json!({
4023 "one.rs": r#"// Rust file one"#,
4024 "one.ts": r#"// TypeScript file one"#,
4025 "two.rs": r#"// Rust file two"#,
4026 "two.ts": r#"// TypeScript file two"#,
4027 }),
4028 )
4029 .await;
4030 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4031
4032 assert!(
4033 search(
4034 &project,
4035 SearchQuery::text(
4036 search_query,
4037 false,
4038 true,
4039 false,
4040 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4041 Default::default(),
4042 None
4043 )
4044 .unwrap(),
4045 cx
4046 )
4047 .await
4048 .unwrap()
4049 .is_empty(),
4050 "If no inclusions match, no files should be returned"
4051 );
4052
4053 assert_eq!(
4054 search(
4055 &project,
4056 SearchQuery::text(
4057 search_query,
4058 false,
4059 true,
4060 false,
4061 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4062 Default::default(),
4063 None
4064 )
4065 .unwrap(),
4066 cx
4067 )
4068 .await
4069 .unwrap(),
4070 HashMap::from_iter([
4071 ("dir/one.rs".to_string(), vec![8..12]),
4072 ("dir/two.rs".to_string(), vec![8..12]),
4073 ]),
4074 "Rust only search should give only Rust files"
4075 );
4076
4077 assert_eq!(
4078 search(
4079 &project,
4080 SearchQuery::text(
4081 search_query,
4082 false,
4083 true,
4084 false,
4085
4086 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4087
4088 Default::default(),
4089 None,
4090 ).unwrap(),
4091 cx
4092 )
4093 .await
4094 .unwrap(),
4095 HashMap::from_iter([
4096 ("dir/one.ts".to_string(), vec![14..18]),
4097 ("dir/two.ts".to_string(), vec![14..18]),
4098 ]),
4099 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4100 );
4101
4102 assert_eq!(
4103 search(
4104 &project,
4105 SearchQuery::text(
4106 search_query,
4107 false,
4108 true,
4109 false,
4110
4111 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4112
4113 Default::default(),
4114 None,
4115 ).unwrap(),
4116 cx
4117 )
4118 .await
4119 .unwrap(),
4120 HashMap::from_iter([
4121 ("dir/two.ts".to_string(), vec![14..18]),
4122 ("dir/one.rs".to_string(), vec![8..12]),
4123 ("dir/one.ts".to_string(), vec![14..18]),
4124 ("dir/two.rs".to_string(), vec![8..12]),
4125 ]),
4126 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4127 );
4128}
4129
4130#[gpui::test]
4131async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4132 init_test(cx);
4133
4134 let search_query = "file";
4135
4136 let fs = FakeFs::new(cx.executor());
4137 fs.insert_tree(
4138 "/dir",
4139 json!({
4140 "one.rs": r#"// Rust file one"#,
4141 "one.ts": r#"// TypeScript file one"#,
4142 "two.rs": r#"// Rust file two"#,
4143 "two.ts": r#"// TypeScript file two"#,
4144 }),
4145 )
4146 .await;
4147 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4148
4149 assert_eq!(
4150 search(
4151 &project,
4152 SearchQuery::text(
4153 search_query,
4154 false,
4155 true,
4156 false,
4157 Default::default(),
4158 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4159 None,
4160 )
4161 .unwrap(),
4162 cx
4163 )
4164 .await
4165 .unwrap(),
4166 HashMap::from_iter([
4167 ("dir/one.rs".to_string(), vec![8..12]),
4168 ("dir/one.ts".to_string(), vec![14..18]),
4169 ("dir/two.rs".to_string(), vec![8..12]),
4170 ("dir/two.ts".to_string(), vec![14..18]),
4171 ]),
4172 "If no exclusions match, all files should be returned"
4173 );
4174
4175 assert_eq!(
4176 search(
4177 &project,
4178 SearchQuery::text(
4179 search_query,
4180 false,
4181 true,
4182 false,
4183 Default::default(),
4184 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4185 None,
4186 )
4187 .unwrap(),
4188 cx
4189 )
4190 .await
4191 .unwrap(),
4192 HashMap::from_iter([
4193 ("dir/one.ts".to_string(), vec![14..18]),
4194 ("dir/two.ts".to_string(), vec![14..18]),
4195 ]),
4196 "Rust exclusion search should give only TypeScript files"
4197 );
4198
4199 assert_eq!(
4200 search(
4201 &project,
4202 SearchQuery::text(
4203 search_query,
4204 false,
4205 true,
4206 false,
4207 Default::default(),
4208 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4209 None,
4210 ).unwrap(),
4211 cx
4212 )
4213 .await
4214 .unwrap(),
4215 HashMap::from_iter([
4216 ("dir/one.rs".to_string(), vec![8..12]),
4217 ("dir/two.rs".to_string(), vec![8..12]),
4218 ]),
4219 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4220 );
4221
4222 assert!(
4223 search(
4224 &project,
4225 SearchQuery::text(
4226 search_query,
4227 false,
4228 true,
4229 false,
4230 Default::default(),
4231
4232 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4233 None,
4234
4235 ).unwrap(),
4236 cx
4237 )
4238 .await
4239 .unwrap().is_empty(),
4240 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4241 );
4242}
4243
4244#[gpui::test]
4245async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4246 init_test(cx);
4247
4248 let search_query = "file";
4249
4250 let fs = FakeFs::new(cx.executor());
4251 fs.insert_tree(
4252 "/dir",
4253 json!({
4254 "one.rs": r#"// Rust file one"#,
4255 "one.ts": r#"// TypeScript file one"#,
4256 "two.rs": r#"// Rust file two"#,
4257 "two.ts": r#"// TypeScript file two"#,
4258 }),
4259 )
4260 .await;
4261 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4262
4263 assert!(
4264 search(
4265 &project,
4266 SearchQuery::text(
4267 search_query,
4268 false,
4269 true,
4270 false,
4271 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4272 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4273 None,
4274 )
4275 .unwrap(),
4276 cx
4277 )
4278 .await
4279 .unwrap()
4280 .is_empty(),
4281 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4282 );
4283
4284 assert!(
4285 search(
4286 &project,
4287 SearchQuery::text(
4288 search_query,
4289 false,
4290 true,
4291 false,
4292 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4293 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4294 None,
4295 ).unwrap(),
4296 cx
4297 )
4298 .await
4299 .unwrap()
4300 .is_empty(),
4301 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4302 );
4303
4304 assert!(
4305 search(
4306 &project,
4307 SearchQuery::text(
4308 search_query,
4309 false,
4310 true,
4311 false,
4312 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4313 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4314 None,
4315 )
4316 .unwrap(),
4317 cx
4318 )
4319 .await
4320 .unwrap()
4321 .is_empty(),
4322 "Non-matching inclusions and exclusions should not change that."
4323 );
4324
4325 assert_eq!(
4326 search(
4327 &project,
4328 SearchQuery::text(
4329 search_query,
4330 false,
4331 true,
4332 false,
4333 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4334 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4335 None,
4336 )
4337 .unwrap(),
4338 cx
4339 )
4340 .await
4341 .unwrap(),
4342 HashMap::from_iter([
4343 ("dir/one.ts".to_string(), vec![14..18]),
4344 ("dir/two.ts".to_string(), vec![14..18]),
4345 ]),
4346 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4347 );
4348}
4349
4350#[gpui::test]
4351async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4352 init_test(cx);
4353
4354 let fs = FakeFs::new(cx.executor());
4355 fs.insert_tree(
4356 "/worktree-a",
4357 json!({
4358 "haystack.rs": r#"// NEEDLE"#,
4359 "haystack.ts": r#"// NEEDLE"#,
4360 }),
4361 )
4362 .await;
4363 fs.insert_tree(
4364 "/worktree-b",
4365 json!({
4366 "haystack.rs": r#"// NEEDLE"#,
4367 "haystack.ts": r#"// NEEDLE"#,
4368 }),
4369 )
4370 .await;
4371
4372 let project = Project::test(
4373 fs.clone(),
4374 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4375 cx,
4376 )
4377 .await;
4378
4379 assert_eq!(
4380 search(
4381 &project,
4382 SearchQuery::text(
4383 "NEEDLE",
4384 false,
4385 true,
4386 false,
4387 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4388 Default::default(),
4389 None,
4390 )
4391 .unwrap(),
4392 cx
4393 )
4394 .await
4395 .unwrap(),
4396 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4397 "should only return results from included worktree"
4398 );
4399 assert_eq!(
4400 search(
4401 &project,
4402 SearchQuery::text(
4403 "NEEDLE",
4404 false,
4405 true,
4406 false,
4407 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4408 Default::default(),
4409 None,
4410 )
4411 .unwrap(),
4412 cx
4413 )
4414 .await
4415 .unwrap(),
4416 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4417 "should only return results from included worktree"
4418 );
4419
4420 assert_eq!(
4421 search(
4422 &project,
4423 SearchQuery::text(
4424 "NEEDLE",
4425 false,
4426 true,
4427 false,
4428 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4429 Default::default(),
4430 None,
4431 )
4432 .unwrap(),
4433 cx
4434 )
4435 .await
4436 .unwrap(),
4437 HashMap::from_iter([
4438 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4439 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4440 ]),
4441 "should return results from both worktrees"
4442 );
4443}
4444
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // By default, search skips gitignored directories ("target",
    // "node_modules"); with the include-ignored flag set it descends into
    // them; inclusion/exclusion matchers still apply on top of that.
    //
    // NOTE(review): a fresh `Project` is constructed for each query —
    // presumably so that worktree/search state from a previous query can't
    // influence the next one; confirm before relying on it.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Combine include-ignored with an inclusion matcher (only the ignored
    // prettier directory) and an exclusion matcher (drop TS files).
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4561
4562#[gpui::test]
4563async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
4564 init_test(cx);
4565
4566 let fs = FakeFs::new(cx.background_executor.clone());
4567 fs.insert_tree(
4568 "/dir",
4569 json!({
4570 ".git": {},
4571 ".gitignore": "**/target\n/node_modules\n",
4572 "aaa.txt": "key:value",
4573 "bbb": {
4574 "index.txt": "index_key:index_value"
4575 },
4576 "node_modules": {
4577 "10 eleven": "key",
4578 "1 two": "key"
4579 },
4580 }),
4581 )
4582 .await;
4583 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4584
4585 let mut search = project.update(cx, |project, cx| {
4586 project.search(
4587 SearchQuery::text(
4588 "key",
4589 false,
4590 false,
4591 true,
4592 Default::default(),
4593 Default::default(),
4594 None,
4595 )
4596 .unwrap(),
4597 cx,
4598 )
4599 });
4600
4601 fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
4602 match search_result.unwrap() {
4603 SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
4604 buffer.file().unwrap().path().to_string_lossy().to_string()
4605 }),
4606 _ => panic!("Expected buffer"),
4607 }
4608 }
4609
4610 assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
4611 assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
4612 assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
4613 assert_eq!(file_name(search.next().await, cx), "aaa.txt");
4614 assert!(search.next().await.is_none())
4615}
4616
4617#[gpui::test]
4618async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4619 init_test(cx);
4620
4621 let fs = FakeFs::new(cx.executor().clone());
4622 fs.insert_tree(
4623 "/one/two",
4624 json!({
4625 "three": {
4626 "a.txt": "",
4627 "four": {}
4628 },
4629 "c.rs": ""
4630 }),
4631 )
4632 .await;
4633
4634 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4635 project
4636 .update(cx, |project, cx| {
4637 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4638 project.create_entry((id, "b.."), true, cx)
4639 })
4640 .await
4641 .unwrap()
4642 .to_included()
4643 .unwrap();
4644
4645 // Can't create paths outside the project
4646 let result = project
4647 .update(cx, |project, cx| {
4648 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4649 project.create_entry((id, "../../boop"), true, cx)
4650 })
4651 .await;
4652 assert!(result.is_err());
4653
4654 // Can't create paths with '..'
4655 let result = project
4656 .update(cx, |project, cx| {
4657 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4658 project.create_entry((id, "four/../beep"), true, cx)
4659 })
4660 .await;
4661 assert!(result.is_err());
4662
4663 assert_eq!(
4664 fs.paths(true),
4665 vec![
4666 PathBuf::from("/"),
4667 PathBuf::from("/one"),
4668 PathBuf::from("/one/two"),
4669 PathBuf::from("/one/two/c.rs"),
4670 PathBuf::from("/one/two/three"),
4671 PathBuf::from("/one/two/three/a.txt"),
4672 PathBuf::from("/one/two/three/b.."),
4673 PathBuf::from("/one/two/three/four"),
4674 ]
4675 );
4676
4677 // And we cannot open buffers with '..'
4678 let result = project
4679 .update(cx, |project, cx| {
4680 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4681 project.open_buffer((id, "../c.rs"), cx)
4682 })
4683 .await;
4684 assert!(result.is_err())
4685}
4686
4687#[gpui::test]
4688async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4689 init_test(cx);
4690
4691 let fs = FakeFs::new(cx.executor());
4692 fs.insert_tree(
4693 "/dir",
4694 json!({
4695 "a.tsx": "a",
4696 }),
4697 )
4698 .await;
4699
4700 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4701
4702 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4703 language_registry.add(tsx_lang());
4704 let language_server_names = [
4705 "TypeScriptServer",
4706 "TailwindServer",
4707 "ESLintServer",
4708 "NoHoverCapabilitiesServer",
4709 ];
4710 let mut language_servers = [
4711 language_registry.register_fake_lsp(
4712 "tsx",
4713 FakeLspAdapter {
4714 name: language_server_names[0],
4715 capabilities: lsp::ServerCapabilities {
4716 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4717 ..lsp::ServerCapabilities::default()
4718 },
4719 ..FakeLspAdapter::default()
4720 },
4721 ),
4722 language_registry.register_fake_lsp(
4723 "tsx",
4724 FakeLspAdapter {
4725 name: language_server_names[1],
4726 capabilities: lsp::ServerCapabilities {
4727 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4728 ..lsp::ServerCapabilities::default()
4729 },
4730 ..FakeLspAdapter::default()
4731 },
4732 ),
4733 language_registry.register_fake_lsp(
4734 "tsx",
4735 FakeLspAdapter {
4736 name: language_server_names[2],
4737 capabilities: lsp::ServerCapabilities {
4738 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4739 ..lsp::ServerCapabilities::default()
4740 },
4741 ..FakeLspAdapter::default()
4742 },
4743 ),
4744 language_registry.register_fake_lsp(
4745 "tsx",
4746 FakeLspAdapter {
4747 name: language_server_names[3],
4748 capabilities: lsp::ServerCapabilities {
4749 hover_provider: None,
4750 ..lsp::ServerCapabilities::default()
4751 },
4752 ..FakeLspAdapter::default()
4753 },
4754 ),
4755 ];
4756
4757 let buffer = project
4758 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4759 .await
4760 .unwrap();
4761 cx.executor().run_until_parked();
4762
4763 let mut servers_with_hover_requests = HashMap::default();
4764 for i in 0..language_server_names.len() {
4765 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
4766 panic!(
4767 "Failed to get language server #{i} with name {}",
4768 &language_server_names[i]
4769 )
4770 });
4771 let new_server_name = new_server.server.name();
4772 assert!(
4773 !servers_with_hover_requests.contains_key(new_server_name),
4774 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4775 );
4776 let new_server_name = new_server_name.to_string();
4777 match new_server_name.as_str() {
4778 "TailwindServer" | "TypeScriptServer" => {
4779 servers_with_hover_requests.insert(
4780 new_server_name.clone(),
4781 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4782 let name = new_server_name.clone();
4783 async move {
4784 Ok(Some(lsp::Hover {
4785 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4786 format!("{name} hover"),
4787 )),
4788 range: None,
4789 }))
4790 }
4791 }),
4792 );
4793 }
4794 "ESLintServer" => {
4795 servers_with_hover_requests.insert(
4796 new_server_name,
4797 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4798 |_, _| async move { Ok(None) },
4799 ),
4800 );
4801 }
4802 "NoHoverCapabilitiesServer" => {
4803 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4804 |_, _| async move {
4805 panic!(
4806 "Should not call for hovers server with no corresponding capabilities"
4807 )
4808 },
4809 );
4810 }
4811 unexpected => panic!("Unexpected server name: {unexpected}"),
4812 }
4813 }
4814
4815 let hover_task = project.update(cx, |project, cx| {
4816 project.hover(&buffer, Point::new(0, 0), cx)
4817 });
4818 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4819 |mut hover_request| async move {
4820 hover_request
4821 .next()
4822 .await
4823 .expect("All hover requests should have been triggered")
4824 },
4825 ))
4826 .await;
4827 assert_eq!(
4828 vec!["TailwindServer hover", "TypeScriptServer hover"],
4829 hover_task
4830 .await
4831 .into_iter()
4832 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4833 .sorted()
4834 .collect::<Vec<_>>(),
4835 "Should receive hover responses from all related servers with hover capabilities"
4836 );
4837}
4838
4839#[gpui::test]
4840async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4841 init_test(cx);
4842
4843 let fs = FakeFs::new(cx.executor());
4844 fs.insert_tree(
4845 "/dir",
4846 json!({
4847 "a.ts": "a",
4848 }),
4849 )
4850 .await;
4851
4852 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4853
4854 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4855 language_registry.add(typescript_lang());
4856 let mut fake_language_servers = language_registry.register_fake_lsp(
4857 "TypeScript",
4858 FakeLspAdapter {
4859 capabilities: lsp::ServerCapabilities {
4860 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4861 ..lsp::ServerCapabilities::default()
4862 },
4863 ..FakeLspAdapter::default()
4864 },
4865 );
4866
4867 let buffer = project
4868 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4869 .await
4870 .unwrap();
4871 cx.executor().run_until_parked();
4872
4873 let fake_server = fake_language_servers
4874 .next()
4875 .await
4876 .expect("failed to get the language server");
4877
4878 let mut request_handled =
4879 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4880 Ok(Some(lsp::Hover {
4881 contents: lsp::HoverContents::Array(vec![
4882 lsp::MarkedString::String("".to_string()),
4883 lsp::MarkedString::String(" ".to_string()),
4884 lsp::MarkedString::String("\n\n\n".to_string()),
4885 ]),
4886 range: None,
4887 }))
4888 });
4889
4890 let hover_task = project.update(cx, |project, cx| {
4891 project.hover(&buffer, Point::new(0, 0), cx)
4892 });
4893 let () = request_handled
4894 .next()
4895 .await
4896 .expect("All hover requests should have been triggered");
4897 assert_eq!(
4898 Vec::<String>::new(),
4899 hover_task
4900 .await
4901 .into_iter()
4902 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4903 .sorted()
4904 .collect::<Vec<_>>(),
4905 "Empty hover parts should be ignored"
4906 );
4907}
4908
4909#[gpui::test]
4910async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4911 init_test(cx);
4912
4913 let fs = FakeFs::new(cx.executor());
4914 fs.insert_tree(
4915 "/dir",
4916 json!({
4917 "a.tsx": "a",
4918 }),
4919 )
4920 .await;
4921
4922 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4923
4924 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4925 language_registry.add(tsx_lang());
4926 let language_server_names = [
4927 "TypeScriptServer",
4928 "TailwindServer",
4929 "ESLintServer",
4930 "NoActionsCapabilitiesServer",
4931 ];
4932
4933 let mut language_server_rxs = [
4934 language_registry.register_fake_lsp(
4935 "tsx",
4936 FakeLspAdapter {
4937 name: language_server_names[0],
4938 capabilities: lsp::ServerCapabilities {
4939 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4940 ..lsp::ServerCapabilities::default()
4941 },
4942 ..FakeLspAdapter::default()
4943 },
4944 ),
4945 language_registry.register_fake_lsp(
4946 "tsx",
4947 FakeLspAdapter {
4948 name: language_server_names[1],
4949 capabilities: lsp::ServerCapabilities {
4950 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4951 ..lsp::ServerCapabilities::default()
4952 },
4953 ..FakeLspAdapter::default()
4954 },
4955 ),
4956 language_registry.register_fake_lsp(
4957 "tsx",
4958 FakeLspAdapter {
4959 name: language_server_names[2],
4960 capabilities: lsp::ServerCapabilities {
4961 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4962 ..lsp::ServerCapabilities::default()
4963 },
4964 ..FakeLspAdapter::default()
4965 },
4966 ),
4967 language_registry.register_fake_lsp(
4968 "tsx",
4969 FakeLspAdapter {
4970 name: language_server_names[3],
4971 capabilities: lsp::ServerCapabilities {
4972 code_action_provider: None,
4973 ..lsp::ServerCapabilities::default()
4974 },
4975 ..FakeLspAdapter::default()
4976 },
4977 ),
4978 ];
4979
4980 let buffer = project
4981 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4982 .await
4983 .unwrap();
4984 cx.executor().run_until_parked();
4985
4986 let mut servers_with_actions_requests = HashMap::default();
4987 for i in 0..language_server_names.len() {
4988 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
4989 panic!(
4990 "Failed to get language server #{i} with name {}",
4991 &language_server_names[i]
4992 )
4993 });
4994 let new_server_name = new_server.server.name();
4995
4996 assert!(
4997 !servers_with_actions_requests.contains_key(new_server_name),
4998 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4999 );
5000 let new_server_name = new_server_name.to_string();
5001 match new_server_name.as_str() {
5002 "TailwindServer" | "TypeScriptServer" => {
5003 servers_with_actions_requests.insert(
5004 new_server_name.clone(),
5005 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5006 move |_, _| {
5007 let name = new_server_name.clone();
5008 async move {
5009 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5010 lsp::CodeAction {
5011 title: format!("{name} code action"),
5012 ..lsp::CodeAction::default()
5013 },
5014 )]))
5015 }
5016 },
5017 ),
5018 );
5019 }
5020 "ESLintServer" => {
5021 servers_with_actions_requests.insert(
5022 new_server_name,
5023 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5024 |_, _| async move { Ok(None) },
5025 ),
5026 );
5027 }
5028 "NoActionsCapabilitiesServer" => {
5029 let _never_handled = new_server
5030 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5031 panic!(
5032 "Should not call for code actions server with no corresponding capabilities"
5033 )
5034 });
5035 }
5036 unexpected => panic!("Unexpected server name: {unexpected}"),
5037 }
5038 }
5039
5040 let code_actions_task = project.update(cx, |project, cx| {
5041 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5042 });
5043
5044 // cx.run_until_parked();
5045 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5046 |mut code_actions_request| async move {
5047 code_actions_request
5048 .next()
5049 .await
5050 .expect("All code actions requests should have been triggered")
5051 },
5052 ))
5053 .await;
5054 assert_eq!(
5055 vec!["TailwindServer code action", "TypeScriptServer code action"],
5056 code_actions_task
5057 .await
5058 .unwrap()
5059 .into_iter()
5060 .map(|code_action| code_action.lsp_action.title)
5061 .sorted()
5062 .collect::<Vec<_>>(),
5063 "Should receive code actions responses from all related servers with hover capabilities"
5064 );
5065}
5066
5067#[gpui::test]
5068async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5069 init_test(cx);
5070
5071 let fs = FakeFs::new(cx.executor());
5072 fs.insert_tree(
5073 "/dir",
5074 json!({
5075 "a.rs": "let a = 1;",
5076 "b.rs": "let b = 2;",
5077 "c.rs": "let c = 2;",
5078 }),
5079 )
5080 .await;
5081
5082 let project = Project::test(
5083 fs,
5084 [
5085 "/dir/a.rs".as_ref(),
5086 "/dir/b.rs".as_ref(),
5087 "/dir/c.rs".as_ref(),
5088 ],
5089 cx,
5090 )
5091 .await;
5092
5093 // check the initial state and get the worktrees
5094 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5095 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5096 assert_eq!(worktrees.len(), 3);
5097
5098 let worktree_a = worktrees[0].read(cx);
5099 let worktree_b = worktrees[1].read(cx);
5100 let worktree_c = worktrees[2].read(cx);
5101
5102 // check they start in the right order
5103 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5104 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5105 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5106
5107 (
5108 worktrees[0].clone(),
5109 worktrees[1].clone(),
5110 worktrees[2].clone(),
5111 )
5112 });
5113
5114 // move first worktree to after the second
5115 // [a, b, c] -> [b, a, c]
5116 project
5117 .update(cx, |project, cx| {
5118 let first = worktree_a.read(cx);
5119 let second = worktree_b.read(cx);
5120 project.move_worktree(first.id(), second.id(), cx)
5121 })
5122 .expect("moving first after second");
5123
5124 // check the state after moving
5125 project.update(cx, |project, cx| {
5126 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5127 assert_eq!(worktrees.len(), 3);
5128
5129 let first = worktrees[0].read(cx);
5130 let second = worktrees[1].read(cx);
5131 let third = worktrees[2].read(cx);
5132
5133 // check they are now in the right order
5134 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5135 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5136 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5137 });
5138
5139 // move the second worktree to before the first
5140 // [b, a, c] -> [a, b, c]
5141 project
5142 .update(cx, |project, cx| {
5143 let second = worktree_a.read(cx);
5144 let first = worktree_b.read(cx);
5145 project.move_worktree(first.id(), second.id(), cx)
5146 })
5147 .expect("moving second before first");
5148
5149 // check the state after moving
5150 project.update(cx, |project, cx| {
5151 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5152 assert_eq!(worktrees.len(), 3);
5153
5154 let first = worktrees[0].read(cx);
5155 let second = worktrees[1].read(cx);
5156 let third = worktrees[2].read(cx);
5157
5158 // check they are now in the right order
5159 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5160 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5161 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5162 });
5163
5164 // move the second worktree to after the third
5165 // [a, b, c] -> [a, c, b]
5166 project
5167 .update(cx, |project, cx| {
5168 let second = worktree_b.read(cx);
5169 let third = worktree_c.read(cx);
5170 project.move_worktree(second.id(), third.id(), cx)
5171 })
5172 .expect("moving second after third");
5173
5174 // check the state after moving
5175 project.update(cx, |project, cx| {
5176 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5177 assert_eq!(worktrees.len(), 3);
5178
5179 let first = worktrees[0].read(cx);
5180 let second = worktrees[1].read(cx);
5181 let third = worktrees[2].read(cx);
5182
5183 // check they are now in the right order
5184 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5185 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5186 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5187 });
5188
5189 // move the third worktree to before the second
5190 // [a, c, b] -> [a, b, c]
5191 project
5192 .update(cx, |project, cx| {
5193 let third = worktree_c.read(cx);
5194 let second = worktree_b.read(cx);
5195 project.move_worktree(third.id(), second.id(), cx)
5196 })
5197 .expect("moving third before second");
5198
5199 // check the state after moving
5200 project.update(cx, |project, cx| {
5201 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5202 assert_eq!(worktrees.len(), 3);
5203
5204 let first = worktrees[0].read(cx);
5205 let second = worktrees[1].read(cx);
5206 let third = worktrees[2].read(cx);
5207
5208 // check they are now in the right order
5209 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5210 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5211 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5212 });
5213
5214 // move the first worktree to after the third
5215 // [a, b, c] -> [b, c, a]
5216 project
5217 .update(cx, |project, cx| {
5218 let first = worktree_a.read(cx);
5219 let third = worktree_c.read(cx);
5220 project.move_worktree(first.id(), third.id(), cx)
5221 })
5222 .expect("moving first after third");
5223
5224 // check the state after moving
5225 project.update(cx, |project, cx| {
5226 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5227 assert_eq!(worktrees.len(), 3);
5228
5229 let first = worktrees[0].read(cx);
5230 let second = worktrees[1].read(cx);
5231 let third = worktrees[2].read(cx);
5232
5233 // check they are now in the right order
5234 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5235 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5236 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5237 });
5238
5239 // move the third worktree to before the first
5240 // [b, c, a] -> [a, b, c]
5241 project
5242 .update(cx, |project, cx| {
5243 let third = worktree_a.read(cx);
5244 let first = worktree_b.read(cx);
5245 project.move_worktree(third.id(), first.id(), cx)
5246 })
5247 .expect("moving third before first");
5248
5249 // check the state after moving
5250 project.update(cx, |project, cx| {
5251 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5252 assert_eq!(worktrees.len(), 3);
5253
5254 let first = worktrees[0].read(cx);
5255 let second = worktrees[1].read(cx);
5256 let third = worktrees[2].read(cx);
5257
5258 // check they are now in the right order
5259 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5260 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5261 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5262 });
5263}
5264
5265async fn search(
5266 project: &Model<Project>,
5267 query: SearchQuery,
5268 cx: &mut gpui::TestAppContext,
5269) -> Result<HashMap<String, Vec<Range<usize>>>> {
5270 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5271 let mut results = HashMap::default();
5272 while let Some(search_result) = search_rx.next().await {
5273 match search_result {
5274 SearchResult::Buffer { buffer, ranges } => {
5275 results.entry(buffer).or_insert(ranges);
5276 }
5277 SearchResult::LimitReached => {}
5278 }
5279 }
5280 Ok(results
5281 .into_iter()
5282 .map(|(buffer, ranges)| {
5283 buffer.update(cx, |buffer, cx| {
5284 let path = buffer
5285 .file()
5286 .unwrap()
5287 .full_path(cx)
5288 .to_string_lossy()
5289 .to_string();
5290 let ranges = ranges
5291 .into_iter()
5292 .map(|range| range.to_offset(buffer))
5293 .collect::<Vec<_>>();
5294 (path, ranges)
5295 })
5296 })
5297 .collect())
5298}
5299
5300pub fn init_test(cx: &mut gpui::TestAppContext) {
5301 if std::env::var("RUST_LOG").is_ok() {
5302 env_logger::try_init().ok();
5303 }
5304
5305 cx.update(|cx| {
5306 let settings_store = SettingsStore::test(cx);
5307 cx.set_global(settings_store);
5308 release_channel::init(SemanticVersion::default(), cx);
5309 language::init(cx);
5310 Project::init_settings(cx);
5311 });
5312}
5313
5314fn json_lang() -> Arc<Language> {
5315 Arc::new(Language::new(
5316 LanguageConfig {
5317 name: "JSON".into(),
5318 matcher: LanguageMatcher {
5319 path_suffixes: vec!["json".to_string()],
5320 ..Default::default()
5321 },
5322 ..Default::default()
5323 },
5324 None,
5325 ))
5326}
5327
5328fn js_lang() -> Arc<Language> {
5329 Arc::new(Language::new(
5330 LanguageConfig {
5331 name: "JavaScript".into(),
5332 matcher: LanguageMatcher {
5333 path_suffixes: vec!["js".to_string()],
5334 ..Default::default()
5335 },
5336 ..Default::default()
5337 },
5338 None,
5339 ))
5340}
5341
5342fn rust_lang() -> Arc<Language> {
5343 Arc::new(Language::new(
5344 LanguageConfig {
5345 name: "Rust".into(),
5346 matcher: LanguageMatcher {
5347 path_suffixes: vec!["rs".to_string()],
5348 ..Default::default()
5349 },
5350 ..Default::default()
5351 },
5352 Some(tree_sitter_rust::LANGUAGE.into()),
5353 ))
5354}
5355
5356fn typescript_lang() -> Arc<Language> {
5357 Arc::new(Language::new(
5358 LanguageConfig {
5359 name: "TypeScript".into(),
5360 matcher: LanguageMatcher {
5361 path_suffixes: vec!["ts".to_string()],
5362 ..Default::default()
5363 },
5364 ..Default::default()
5365 },
5366 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5367 ))
5368}
5369
5370fn tsx_lang() -> Arc<Language> {
5371 Arc::new(Language::new(
5372 LanguageConfig {
5373 name: "tsx".into(),
5374 matcher: LanguageMatcher {
5375 path_suffixes: vec!["tsx".to_string()],
5376 ..Default::default()
5377 },
5378 ..Default::default()
5379 },
5380 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5381 ))
5382}
5383
5384fn get_all_tasks(
5385 project: &Model<Project>,
5386 worktree_id: Option<WorktreeId>,
5387 task_context: &TaskContext,
5388 cx: &mut AppContext,
5389) -> Vec<(TaskSourceKind, ResolvedTask)> {
5390 let (mut old, new) = project.update(cx, |project, cx| {
5391 project
5392 .task_store
5393 .read(cx)
5394 .task_inventory()
5395 .unwrap()
5396 .read(cx)
5397 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5398 });
5399 old.extend(new);
5400 old
5401}