1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
53#[cfg(not(windows))]
54#[gpui::test]
55async fn test_symlinks(cx: &mut gpui::TestAppContext) {
56 init_test(cx);
57 cx.executor().allow_parking();
58
59 let dir = temp_tree(json!({
60 "root": {
61 "apple": "",
62 "banana": {
63 "carrot": {
64 "date": "",
65 "endive": "",
66 }
67 },
68 "fennel": {
69 "grape": "",
70 }
71 }
72 }));
73
74 let root_link_path = dir.path().join("root_link");
75 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
76 os::unix::fs::symlink(
77 dir.path().join("root/fennel"),
78 dir.path().join("root/finnochio"),
79 )
80 .unwrap();
81
82 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
83
84 project.update(cx, |project, cx| {
85 let tree = project.worktrees(cx).next().unwrap().read(cx);
86 assert_eq!(tree.file_count(), 5);
87 assert_eq!(
88 tree.inode_for_path("fennel/grape"),
89 tree.inode_for_path("finnochio/grape")
90 );
91 });
92}
93
// Verifies per-directory `.zed` configuration inside a single worktree:
// `settings.json` values apply to files under the directory that defines
// them, and tasks from nested `.zed/tasks.json` files are surfaced alongside
// the worktree-root tasks (and, later, global file-based tasks).
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree layout: a root-level `.zed` (tab_size 8, one task) and a
    // nested `b/.zed` (tab_size 2, its own task) overriding it for `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the settings/tasks files be scanned before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies tasks coming from the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: `a/a.rs` sees the root `.zed`,
            // `b/b.rs` sees the nested override.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both the nested and the root `.zed` tasks are present.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most-recently scheduled, and add a global
    // file-based task with args and an environment variable.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled root task now sorts first; the global
    // file-based task appears last with its env propagated.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
297
// End-to-end lifecycle test for language servers: startup on buffer open,
// capability-based buffer configuration, routing of change/save/close
// notifications to the matching server, re-opening across file renames that
// change language, and server restart. The fake Rust and JSON servers record
// the notifications they receive so ordering can be asserted exactly.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: advertises completion triggers `.` and `::`, and
    // asks to be told about saves.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: single completion trigger `:`, save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    // Register file watchers so later FS mutations reach the server.
    fake_rust_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // The already-open Rust buffer is reported to the new server at version 0.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    fake_json_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![lsp::FileSystemWatcher {
                            glob_pattern: lsp::GlobPattern::String("/the-root/*.json".to_string()),
                            kind: None,
                        }],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported only to servers that signed up for a given extension.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-language rename appears as close-then-open with version reset.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic so the language-change rename below can verify
    // diagnostics are cleared.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
719
// Verifies `workspace/didChangeWatchedFiles` handling: watched glob patterns
// cause gitignored directories to be loaded on demand, and only FS mutations
// matching a registered watcher are forwarded to the language server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target` is gitignored; only `target/y/**` will later be watched.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory scans the watcher
    // registration triggers below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Watch a single file, a non-recursive src glob, and a recursive glob
    // inside the ignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate all forwarded events, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone produces no events, but scanning
    // `target/y` costs a bounded number of read_dir calls.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    // (`d.txt` and `target/x/...` fall outside every watcher and are absent.)
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
913
914#[gpui::test]
915async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
916 init_test(cx);
917
918 let fs = FakeFs::new(cx.executor());
919 fs.insert_tree(
920 "/dir",
921 json!({
922 "a.rs": "let a = 1;",
923 "b.rs": "let b = 2;"
924 }),
925 )
926 .await;
927
928 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
929
930 let buffer_a = project
931 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
932 .await
933 .unwrap();
934 let buffer_b = project
935 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
936 .await
937 .unwrap();
938
939 project.update(cx, |project, cx| {
940 project
941 .update_diagnostics(
942 LanguageServerId(0),
943 lsp::PublishDiagnosticsParams {
944 uri: Url::from_file_path("/dir/a.rs").unwrap(),
945 version: None,
946 diagnostics: vec![lsp::Diagnostic {
947 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
948 severity: Some(lsp::DiagnosticSeverity::ERROR),
949 message: "error 1".to_string(),
950 ..Default::default()
951 }],
952 },
953 &[],
954 cx,
955 )
956 .unwrap();
957 project
958 .update_diagnostics(
959 LanguageServerId(0),
960 lsp::PublishDiagnosticsParams {
961 uri: Url::from_file_path("/dir/b.rs").unwrap(),
962 version: None,
963 diagnostics: vec![lsp::Diagnostic {
964 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
965 severity: Some(DiagnosticSeverity::WARNING),
966 message: "error 2".to_string(),
967 ..Default::default()
968 }],
969 },
970 &[],
971 cx,
972 )
973 .unwrap();
974 });
975
976 buffer_a.update(cx, |buffer, _| {
977 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
978 assert_eq!(
979 chunks
980 .iter()
981 .map(|(s, d)| (s.as_str(), *d))
982 .collect::<Vec<_>>(),
983 &[
984 ("let ", None),
985 ("a", Some(DiagnosticSeverity::ERROR)),
986 (" = 1;", None),
987 ]
988 );
989 });
990 buffer_b.update(cx, |buffer, _| {
991 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
992 assert_eq!(
993 chunks
994 .iter()
995 .map(|(s, d)| (s.as_str(), *d))
996 .collect::<Vec<_>>(),
997 &[
998 ("let ", None),
999 ("b", Some(DiagnosticSeverity::WARNING)),
1000 (" = 2;", None),
1001 ]
1002 );
1003 });
1004}
1005
1006#[gpui::test]
1007async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1008 init_test(cx);
1009
1010 let fs = FakeFs::new(cx.executor());
1011 fs.insert_tree(
1012 "/root",
1013 json!({
1014 "dir": {
1015 ".git": {
1016 "HEAD": "ref: refs/heads/main",
1017 },
1018 ".gitignore": "b.rs",
1019 "a.rs": "let a = 1;",
1020 "b.rs": "let b = 2;",
1021 },
1022 "other.rs": "let b = c;"
1023 }),
1024 )
1025 .await;
1026
1027 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1028 let (worktree, _) = project
1029 .update(cx, |project, cx| {
1030 project.find_or_create_worktree("/root/dir", true, cx)
1031 })
1032 .await
1033 .unwrap();
1034 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1035
1036 let (worktree, _) = project
1037 .update(cx, |project, cx| {
1038 project.find_or_create_worktree("/root/other.rs", false, cx)
1039 })
1040 .await
1041 .unwrap();
1042 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1043
1044 let server_id = LanguageServerId(0);
1045 project.update(cx, |project, cx| {
1046 project
1047 .update_diagnostics(
1048 server_id,
1049 lsp::PublishDiagnosticsParams {
1050 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1051 version: None,
1052 diagnostics: vec![lsp::Diagnostic {
1053 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1054 severity: Some(lsp::DiagnosticSeverity::ERROR),
1055 message: "unused variable 'b'".to_string(),
1056 ..Default::default()
1057 }],
1058 },
1059 &[],
1060 cx,
1061 )
1062 .unwrap();
1063 project
1064 .update_diagnostics(
1065 server_id,
1066 lsp::PublishDiagnosticsParams {
1067 uri: Url::from_file_path("/root/other.rs").unwrap(),
1068 version: None,
1069 diagnostics: vec![lsp::Diagnostic {
1070 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1071 severity: Some(lsp::DiagnosticSeverity::ERROR),
1072 message: "unknown variable 'c'".to_string(),
1073 ..Default::default()
1074 }],
1075 },
1076 &[],
1077 cx,
1078 )
1079 .unwrap();
1080 });
1081
1082 let main_ignored_buffer = project
1083 .update(cx, |project, cx| {
1084 project.open_buffer((main_worktree_id, "b.rs"), cx)
1085 })
1086 .await
1087 .unwrap();
1088 main_ignored_buffer.update(cx, |buffer, _| {
1089 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1090 assert_eq!(
1091 chunks
1092 .iter()
1093 .map(|(s, d)| (s.as_str(), *d))
1094 .collect::<Vec<_>>(),
1095 &[
1096 ("let ", None),
1097 ("b", Some(DiagnosticSeverity::ERROR)),
1098 (" = 2;", None),
1099 ],
1100 "Gigitnored buffers should still get in-buffer diagnostics",
1101 );
1102 });
1103 let other_buffer = project
1104 .update(cx, |project, cx| {
1105 project.open_buffer((other_worktree_id, ""), cx)
1106 })
1107 .await
1108 .unwrap();
1109 other_buffer.update(cx, |buffer, _| {
1110 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1111 assert_eq!(
1112 chunks
1113 .iter()
1114 .map(|(s, d)| (s.as_str(), *d))
1115 .collect::<Vec<_>>(),
1116 &[
1117 ("let b = ", None),
1118 ("c", Some(DiagnosticSeverity::ERROR)),
1119 (";", None),
1120 ],
1121 "Buffers from hidden projects should still get in-buffer diagnostics"
1122 );
1123 });
1124
1125 project.update(cx, |project, cx| {
1126 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1127 assert_eq!(
1128 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1129 vec![(
1130 ProjectPath {
1131 worktree_id: main_worktree_id,
1132 path: Arc::from(Path::new("b.rs")),
1133 },
1134 server_id,
1135 DiagnosticSummary {
1136 error_count: 1,
1137 warning_count: 0,
1138 }
1139 )]
1140 );
1141 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1142 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1143 });
1144}
1145
// Verifies the event lifecycle for disk-based diagnostics: the project emits
// LanguageServerAdded, then DiskBasedDiagnosticsStarted when the server begins
// progress on the configured token, DiagnosticsUpdated when diagnostics are
// published, and DiskBasedDiagnosticsFinished when progress ends. Also checks
// that publishing the same empty diagnostics twice yields only one update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter is configured so that progress on `progress_token` is
    // interpreted as a disk-based diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning progress on the disk-based token (here with a "/0" suffix —
    // prefix matching applies) surfaces as DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish one error for a.rs; the project should emit DiagnosticsUpdated
    // for that path even though the buffer isn't open yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the file now should expose the previously-published diagnostic
    // in the buffer snapshot.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (still-empty) publish is a no-op: after settling,
    // no further event is pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1274
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open doesn't leave the project stuck in the "diagnostics
// running" state: the new server instance's progress lifecycle fully governs
// the DiskBasedDiagnosticsStarted/Finished events.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1353
// Verifies that restarting a language server clears the diagnostics the old
// instance had published, both from the open buffer and from the project-wide
// diagnostic summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1432
1433#[gpui::test]
1434async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1435 init_test(cx);
1436
1437 let fs = FakeFs::new(cx.executor());
1438 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1439
1440 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1441 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1442
1443 language_registry.add(rust_lang());
1444 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1445
1446 let buffer = project
1447 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1448 .await
1449 .unwrap();
1450
1451 // Before restarting the server, report diagnostics with an unknown buffer version.
1452 let fake_server = fake_servers.next().await.unwrap();
1453 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1454 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1455 version: Some(10000),
1456 diagnostics: Vec::new(),
1457 });
1458 cx.executor().run_until_parked();
1459
1460 project.update(cx, |project, cx| {
1461 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1462 });
1463 let mut fake_server = fake_servers.next().await.unwrap();
1464 let notification = fake_server
1465 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1466 .await
1467 .text_document;
1468 assert_eq!(notification.version, 0);
1469}
1470
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress that was begun as
// cancellable — the non-cancellable token must not be cancelled.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Two in-flight progress tokens: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should produce a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1533
// Verifies that toggling `enable_language_server` in per-language settings
// starts and stops exactly the affected server: disabling Rust stops only the
// Rust server, and re-enabling Rust while disabling JavaScript starts a fresh
// Rust server and shuts down the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // Shutdown is observed as an LSP `exit` notification.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server instance re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // ...while the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1647
// Verifies that diagnostics published against an older buffer version are
// transformed through the edits made since that version: ranges move with the
// text, overlapping diagnostics highlight correctly, and out-of-order /
// versioned publishes land on the right positions. Group ids observed in the
// assertions increase monotonically with each publish.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The error run nests inside the warning run; the remainder of the
        // warning's range keeps WARNING severity.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Results come back sorted by position, adjusted for the latest edits.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1927
1928#[gpui::test]
1929async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1930 init_test(cx);
1931
1932 let text = concat!(
1933 "let one = ;\n", //
1934 "let two = \n",
1935 "let three = 3;\n",
1936 );
1937
1938 let fs = FakeFs::new(cx.executor());
1939 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1940
1941 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1942 let buffer = project
1943 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1944 .await
1945 .unwrap();
1946
1947 project.update(cx, |project, cx| {
1948 project.lsp_store.update(cx, |lsp_store, cx| {
1949 lsp_store
1950 .update_buffer_diagnostics(
1951 &buffer,
1952 LanguageServerId(0),
1953 None,
1954 vec![
1955 DiagnosticEntry {
1956 range: Unclipped(PointUtf16::new(0, 10))
1957 ..Unclipped(PointUtf16::new(0, 10)),
1958 diagnostic: Diagnostic {
1959 severity: DiagnosticSeverity::ERROR,
1960 message: "syntax error 1".to_string(),
1961 ..Default::default()
1962 },
1963 },
1964 DiagnosticEntry {
1965 range: Unclipped(PointUtf16::new(1, 10))
1966 ..Unclipped(PointUtf16::new(1, 10)),
1967 diagnostic: Diagnostic {
1968 severity: DiagnosticSeverity::ERROR,
1969 message: "syntax error 2".to_string(),
1970 ..Default::default()
1971 },
1972 },
1973 ],
1974 cx,
1975 )
1976 .unwrap();
1977 })
1978 });
1979
1980 // An empty range is extended forward to include the following character.
1981 // At the end of a line, an empty range is extended backward to include
1982 // the preceding character.
1983 buffer.update(cx, |buffer, _| {
1984 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1985 assert_eq!(
1986 chunks
1987 .iter()
1988 .map(|(s, d)| (s.as_str(), *d))
1989 .collect::<Vec<_>>(),
1990 &[
1991 ("let one = ", None),
1992 (";", Some(DiagnosticSeverity::ERROR)),
1993 ("\nlet two =", None),
1994 (" ", Some(DiagnosticSeverity::ERROR)),
1995 ("\nlet three = 3;\n", None)
1996 ]
1997 );
1998 });
1999}
2000
2001#[gpui::test]
2002async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2003 init_test(cx);
2004
2005 let fs = FakeFs::new(cx.executor());
2006 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2007 .await;
2008
2009 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2010
2011 project.update(cx, |project, cx| {
2012 project
2013 .update_diagnostic_entries(
2014 LanguageServerId(0),
2015 Path::new("/dir/a.rs").to_owned(),
2016 None,
2017 vec![DiagnosticEntry {
2018 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2019 diagnostic: Diagnostic {
2020 severity: DiagnosticSeverity::ERROR,
2021 is_primary: true,
2022 message: "syntax error a1".to_string(),
2023 ..Default::default()
2024 },
2025 }],
2026 cx,
2027 )
2028 .unwrap();
2029 project
2030 .update_diagnostic_entries(
2031 LanguageServerId(1),
2032 Path::new("/dir/a.rs").to_owned(),
2033 None,
2034 vec![DiagnosticEntry {
2035 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2036 diagnostic: Diagnostic {
2037 severity: DiagnosticSeverity::ERROR,
2038 is_primary: true,
2039 message: "syntax error b1".to_string(),
2040 ..Default::default()
2041 },
2042 }],
2043 cx,
2044 )
2045 .unwrap();
2046
2047 assert_eq!(
2048 project.diagnostic_summary(false, cx),
2049 DiagnosticSummary {
2050 error_count: 2,
2051 warning_count: 0,
2052 }
2053 );
2054 });
2055}
2056
// Verifies that LSP text edits expressed against a past buffer version are
// correctly rebased onto the current buffer contents: edits the user made
// after the server snapshotted the document must be preserved when the
// server's edits are applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the version the server saw when the document was opened; the
    // server's edits below will be expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The server's edits reference coordinates in the *old* document version;
    // edits_from_lsp must translate them through the user's edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the user's comments while
    // landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2209
// Verifies that a large, whole-file diff sent by a language server (as
// rust-analyzer does for merge-imports) is minimized by edits_from_lsp into
// just the small edits that actually change the text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits collapse to just two minimal edits: rewriting
        // the first import and deleting the now-redundant second one.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2318
// Language servers sometimes send edits out of order, with inverted ranges,
// or with positions beyond the end of the buffer. `edits_from_lsp` must
// tolerate all of these, producing the same minimal edit set as it would for
// well-formed input.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: the end position precedes the start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending far past the end of the buffer (line 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve anchors to points for concrete comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal,
        // ordered pair of edits as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2423
2424fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2425 buffer: &Buffer,
2426 range: Range<T>,
2427) -> Vec<(String, Option<DiagnosticSeverity>)> {
2428 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2429 for chunk in buffer.snapshot().chunks(range, true) {
2430 if chunks.last().map_or(false, |prev_chunk| {
2431 prev_chunk.1 == chunk.diagnostic_severity
2432 }) {
2433 chunks.last_mut().unwrap().0.push_str(chunk.text);
2434 } else {
2435 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2436 }
2437 }
2438 chunks
2439}
2440
// Go-to-definition targeting a file outside the project's visible worktrees
// should open the target in a new invisible worktree, reuse the existing
// language server, and release that worktree once the definition handle is
// dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not
    // in any worktree yet.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Answer the definition request with a location inside `a.rs`.
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was added as an invisible (`false`) worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition handle releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute root path along with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2534
// Completion items that come back without an explicit edit range must have
// their replacement range inferred from the text around the cursor: the word
// being typed (`fqn` below), or the trailing fragment of a quoted string
// (`cmp`). `insert_text`, when present, takes precedence over `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of a word.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` was used, not the `label` (which carries a trailing `?`).
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers exactly the three characters of `fqn`.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, cursor before the closing
    // quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers `cmp`, excluding the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2626
// Completion insert text containing carriage returns ("\r" and "\r\n") must
// be normalized to "\n" before being applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with insert text containing both a bare "\r" and a "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both carriage-return forms were normalized to plain "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2687
// Applying a code action that resolves to a command (rather than to edits)
// must execute that command, and the `workspace/applyEdit` request the server
// sends while the command runs must be captured into the project transaction
// returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise lazy resolution so the client must send
                // `codeAction/resolve` before applying.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of `a.ts`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2820
2821#[gpui::test(iterations = 10)]
2822async fn test_save_file(cx: &mut gpui::TestAppContext) {
2823 init_test(cx);
2824
2825 let fs = FakeFs::new(cx.executor());
2826 fs.insert_tree(
2827 "/dir",
2828 json!({
2829 "file1": "the old contents",
2830 }),
2831 )
2832 .await;
2833
2834 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2835 let buffer = project
2836 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2837 .await
2838 .unwrap();
2839 buffer.update(cx, |buffer, cx| {
2840 assert_eq!(buffer.text(), "the old contents");
2841 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2842 });
2843
2844 project
2845 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2846 .await
2847 .unwrap();
2848
2849 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2850 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2851}
2852
2853#[gpui::test(iterations = 30)]
2854async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2855 init_test(cx);
2856
2857 let fs = FakeFs::new(cx.executor().clone());
2858 fs.insert_tree(
2859 "/dir",
2860 json!({
2861 "file1": "the original contents",
2862 }),
2863 )
2864 .await;
2865
2866 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2867 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2868 let buffer = project
2869 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2870 .await
2871 .unwrap();
2872
2873 // Simulate buffer diffs being slow, so that they don't complete before
2874 // the next file change occurs.
2875 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2876
2877 // Change the buffer's file on disk, and then wait for the file change
2878 // to be detected by the worktree, so that the buffer starts reloading.
2879 fs.save(
2880 "/dir/file1".as_ref(),
2881 &"the first contents".into(),
2882 Default::default(),
2883 )
2884 .await
2885 .unwrap();
2886 worktree.next_event(cx).await;
2887
2888 // Change the buffer's file again. Depending on the random seed, the
2889 // previous file change may still be in progress.
2890 fs.save(
2891 "/dir/file1".as_ref(),
2892 &"the second contents".into(),
2893 Default::default(),
2894 )
2895 .await
2896 .unwrap();
2897 worktree.next_event(cx).await;
2898
2899 cx.executor().run_until_parked();
2900 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2901 buffer.read_with(cx, |buffer, _| {
2902 assert_eq!(buffer.text(), on_disk_text);
2903 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2904 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2905 });
2906}
2907
2908#[gpui::test(iterations = 30)]
2909async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2910 init_test(cx);
2911
2912 let fs = FakeFs::new(cx.executor().clone());
2913 fs.insert_tree(
2914 "/dir",
2915 json!({
2916 "file1": "the original contents",
2917 }),
2918 )
2919 .await;
2920
2921 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2922 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2923 let buffer = project
2924 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2925 .await
2926 .unwrap();
2927
2928 // Simulate buffer diffs being slow, so that they don't complete before
2929 // the next file change occurs.
2930 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2931
2932 // Change the buffer's file on disk, and then wait for the file change
2933 // to be detected by the worktree, so that the buffer starts reloading.
2934 fs.save(
2935 "/dir/file1".as_ref(),
2936 &"the first contents".into(),
2937 Default::default(),
2938 )
2939 .await
2940 .unwrap();
2941 worktree.next_event(cx).await;
2942
2943 cx.executor()
2944 .spawn(cx.executor().simulate_random_delay())
2945 .await;
2946
2947 // Perform a noop edit, causing the buffer's version to increase.
2948 buffer.update(cx, |buffer, cx| {
2949 buffer.edit([(0..0, " ")], None, cx);
2950 buffer.undo(cx);
2951 });
2952
2953 cx.executor().run_until_parked();
2954 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2955 buffer.read_with(cx, |buffer, _| {
2956 let buffer_text = buffer.text();
2957 if buffer_text == on_disk_text {
2958 assert!(
2959 !buffer.is_dirty() && !buffer.has_conflict(),
2960 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2961 );
2962 }
2963 // If the file change occurred while the buffer was processing the first
2964 // change, the buffer will be in a conflicting state.
2965 else {
2966 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2967 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2968 }
2969 });
2970}
2971
2972#[gpui::test]
2973async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2974 init_test(cx);
2975
2976 let fs = FakeFs::new(cx.executor());
2977 fs.insert_tree(
2978 "/dir",
2979 json!({
2980 "file1": "the old contents",
2981 }),
2982 )
2983 .await;
2984
2985 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2986 let buffer = project
2987 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2988 .await
2989 .unwrap();
2990 buffer.update(cx, |buffer, cx| {
2991 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2992 });
2993
2994 project
2995 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2996 .await
2997 .unwrap();
2998
2999 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3000 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3001}
3002
// `save_buffer_as` on an untitled buffer should: write the contents to the
// new path, clear the dirty flag, re-detect the language from the new file
// extension, and register the buffer under the path so a subsequent open
// returns the same entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer has no file, so it starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension caused the language to be re-detected as Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path yields the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3054
// Exercises renames and deletions on a real filesystem: entry ids and open
// buffers must follow files across renames, deleted files must be flagged,
// and a remote replica fed the observed update stream must converge to the
// same paths as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a path (panics if missing).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can be replayed
    // into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive both direct renames and renames of ancestors.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files to the new locations...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while the deleted file's buffer keeps its old path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3202
// Renaming a directory must preserve the stable entry ids of the directory
// and the files inside it, and must not mark open buffers dirty.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the stable entry id for a path (panics if missing).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory `a` -> `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the contained file keep their entry ids, and
    // the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3254
3255#[gpui::test]
3256async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3257 init_test(cx);
3258
3259 let fs = FakeFs::new(cx.executor());
3260 fs.insert_tree(
3261 "/dir",
3262 json!({
3263 "a.txt": "a-contents",
3264 "b.txt": "b-contents",
3265 }),
3266 )
3267 .await;
3268
3269 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3270
3271 // Spawn multiple tasks to open paths, repeating some paths.
3272 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3273 (
3274 p.open_local_buffer("/dir/a.txt", cx),
3275 p.open_local_buffer("/dir/b.txt", cx),
3276 p.open_local_buffer("/dir/a.txt", cx),
3277 )
3278 });
3279
3280 let buffer_a_1 = buffer_a_1.await.unwrap();
3281 let buffer_a_2 = buffer_a_2.await.unwrap();
3282 let buffer_b = buffer_b.await.unwrap();
3283 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3284 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3285
3286 // There is only one buffer per path.
3287 let buffer_a_id = buffer_a_1.entity_id();
3288 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3289
3290 // Open the same path again while it is still open.
3291 drop(buffer_a_1);
3292 let buffer_a_3 = project
3293 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3294 .await
3295 .unwrap();
3296
3297 // There's still only one buffer per path.
3298 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3299}
3300
// Verifies dirty-state tracking for buffers: editing marks a buffer dirty,
// saving clears it, restoring the saved text clears it again, and deleting
// the underlying file dirties the buffer. Also checks the exact sequence of
// buffer events emitted for each transition.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Shared log of every buffer event (except low-level operations),
    // inspected after each phase of the test.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all events other than `Operation`, which is emitted for
        // every edit and would add noise to the assertions below.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by reporting the current version/mtime back to the
        // buffer, which should clear the dirty flag.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits flips the dirty flag, so
        // only one DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file: only FileHandleChanged
    // should be emitted, since the dirty state doesn't change.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3447
// Verifies how a buffer reacts to its file changing on disk: a clean buffer
// is reloaded in place (preserving anchors across the diff), while a dirty
// buffer keeps its edits and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three lines, so we can
    // check that anchors survive the on-disk reload below.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors on "aaa" and "bbbbb" moved down with their lines;
        // the anchor on the deleted "c" line landed at the end of "bbbbb".
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3528
3529#[gpui::test]
3530async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3531 init_test(cx);
3532
3533 let fs = FakeFs::new(cx.executor());
3534 fs.insert_tree(
3535 "/dir",
3536 json!({
3537 "file1": "a\nb\nc\n",
3538 "file2": "one\r\ntwo\r\nthree\r\n",
3539 }),
3540 )
3541 .await;
3542
3543 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3544 let buffer1 = project
3545 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3546 .await
3547 .unwrap();
3548 let buffer2 = project
3549 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3550 .await
3551 .unwrap();
3552
3553 buffer1.update(cx, |buffer, _| {
3554 assert_eq!(buffer.text(), "a\nb\nc\n");
3555 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3556 });
3557 buffer2.update(cx, |buffer, _| {
3558 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3559 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3560 });
3561
3562 // Change a file's line endings on disk from unix to windows. The buffer's
3563 // state updates correctly.
3564 fs.save(
3565 "/dir/file1".as_ref(),
3566 &"aaa\nb\nc\n".into(),
3567 LineEnding::Windows,
3568 )
3569 .await
3570 .unwrap();
3571 cx.executor().run_until_parked();
3572 buffer1.update(cx, |buffer, _| {
3573 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3574 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3575 });
3576
3577 // Save a file with windows line endings. The file is written correctly.
3578 buffer2.update(cx, |buffer, cx| {
3579 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3580 });
3581 project
3582 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3583 .await
3584 .unwrap();
3585 assert_eq!(
3586 fs.load("/dir/file2".as_ref()).await.unwrap(),
3587 "one\r\ntwo\r\nthree\r\nfour\r\n",
3588 );
3589}
3590
// Verifies that LSP diagnostics connected via `relatedInformation` are
// grouped: a primary diagnostic and its hint diagnostics share a `group_id`,
// and `diagnostic_group` returns all members of one group in position order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two groups:
    // - "error 1" (warning) plus one hint, linked both ways via
    //   relatedInformation.
    // - "error 2" (error) plus two hints at a different range.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics over the whole buffer, ordered by position. "error 2"
    // and its hints share group 0; "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3832
// Verifies the LSP rename flow against a fake server: `prepare_rename`
// resolves the renameable range, and `perform_rename` applies a multi-file
// WorkspaceEdit, returning the affected buffers with their edits applied.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // (including prepareRename).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should resolve to the
    // range of the identifier, as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the fake server responds with a WorkspaceEdit touching
    // both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction maps each affected buffer to its edits;
    // verify both buffers were rewritten as the server requested.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3966
3967#[gpui::test]
3968async fn test_search(cx: &mut gpui::TestAppContext) {
3969 init_test(cx);
3970
3971 let fs = FakeFs::new(cx.executor());
3972 fs.insert_tree(
3973 "/dir",
3974 json!({
3975 "one.rs": "const ONE: usize = 1;",
3976 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3977 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3978 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3979 }),
3980 )
3981 .await;
3982 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3983 assert_eq!(
3984 search(
3985 &project,
3986 SearchQuery::text(
3987 "TWO",
3988 false,
3989 true,
3990 false,
3991 Default::default(),
3992 Default::default(),
3993 None
3994 )
3995 .unwrap(),
3996 cx
3997 )
3998 .await
3999 .unwrap(),
4000 HashMap::from_iter([
4001 ("dir/two.rs".to_string(), vec![6..9]),
4002 ("dir/three.rs".to_string(), vec![37..40])
4003 ])
4004 );
4005
4006 let buffer_4 = project
4007 .update(cx, |project, cx| {
4008 project.open_local_buffer("/dir/four.rs", cx)
4009 })
4010 .await
4011 .unwrap();
4012 buffer_4.update(cx, |buffer, cx| {
4013 let text = "two::TWO";
4014 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4015 });
4016
4017 assert_eq!(
4018 search(
4019 &project,
4020 SearchQuery::text(
4021 "TWO",
4022 false,
4023 true,
4024 false,
4025 Default::default(),
4026 Default::default(),
4027 None,
4028 )
4029 .unwrap(),
4030 cx
4031 )
4032 .await
4033 .unwrap(),
4034 HashMap::from_iter([
4035 ("dir/two.rs".to_string(), vec![6..9]),
4036 ("dir/three.rs".to_string(), vec![37..40]),
4037 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4038 ])
4039 );
4040}
4041
4042#[gpui::test]
4043async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4044 init_test(cx);
4045
4046 let search_query = "file";
4047
4048 let fs = FakeFs::new(cx.executor());
4049 fs.insert_tree(
4050 "/dir",
4051 json!({
4052 "one.rs": r#"// Rust file one"#,
4053 "one.ts": r#"// TypeScript file one"#,
4054 "two.rs": r#"// Rust file two"#,
4055 "two.ts": r#"// TypeScript file two"#,
4056 }),
4057 )
4058 .await;
4059 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4060
4061 assert!(
4062 search(
4063 &project,
4064 SearchQuery::text(
4065 search_query,
4066 false,
4067 true,
4068 false,
4069 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4070 Default::default(),
4071 None
4072 )
4073 .unwrap(),
4074 cx
4075 )
4076 .await
4077 .unwrap()
4078 .is_empty(),
4079 "If no inclusions match, no files should be returned"
4080 );
4081
4082 assert_eq!(
4083 search(
4084 &project,
4085 SearchQuery::text(
4086 search_query,
4087 false,
4088 true,
4089 false,
4090 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4091 Default::default(),
4092 None
4093 )
4094 .unwrap(),
4095 cx
4096 )
4097 .await
4098 .unwrap(),
4099 HashMap::from_iter([
4100 ("dir/one.rs".to_string(), vec![8..12]),
4101 ("dir/two.rs".to_string(), vec![8..12]),
4102 ]),
4103 "Rust only search should give only Rust files"
4104 );
4105
4106 assert_eq!(
4107 search(
4108 &project,
4109 SearchQuery::text(
4110 search_query,
4111 false,
4112 true,
4113 false,
4114
4115 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4116
4117 Default::default(),
4118 None,
4119 ).unwrap(),
4120 cx
4121 )
4122 .await
4123 .unwrap(),
4124 HashMap::from_iter([
4125 ("dir/one.ts".to_string(), vec![14..18]),
4126 ("dir/two.ts".to_string(), vec![14..18]),
4127 ]),
4128 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4129 );
4130
4131 assert_eq!(
4132 search(
4133 &project,
4134 SearchQuery::text(
4135 search_query,
4136 false,
4137 true,
4138 false,
4139
4140 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4141
4142 Default::default(),
4143 None,
4144 ).unwrap(),
4145 cx
4146 )
4147 .await
4148 .unwrap(),
4149 HashMap::from_iter([
4150 ("dir/two.ts".to_string(), vec![14..18]),
4151 ("dir/one.rs".to_string(), vec![8..12]),
4152 ("dir/one.ts".to_string(), vec![14..18]),
4153 ("dir/two.rs".to_string(), vec![8..12]),
4154 ]),
4155 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4156 );
4157}
4158
4159#[gpui::test]
4160async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4161 init_test(cx);
4162
4163 let search_query = "file";
4164
4165 let fs = FakeFs::new(cx.executor());
4166 fs.insert_tree(
4167 "/dir",
4168 json!({
4169 "one.rs": r#"// Rust file one"#,
4170 "one.ts": r#"// TypeScript file one"#,
4171 "two.rs": r#"// Rust file two"#,
4172 "two.ts": r#"// TypeScript file two"#,
4173 }),
4174 )
4175 .await;
4176 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4177
4178 assert_eq!(
4179 search(
4180 &project,
4181 SearchQuery::text(
4182 search_query,
4183 false,
4184 true,
4185 false,
4186 Default::default(),
4187 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4188 None,
4189 )
4190 .unwrap(),
4191 cx
4192 )
4193 .await
4194 .unwrap(),
4195 HashMap::from_iter([
4196 ("dir/one.rs".to_string(), vec![8..12]),
4197 ("dir/one.ts".to_string(), vec![14..18]),
4198 ("dir/two.rs".to_string(), vec![8..12]),
4199 ("dir/two.ts".to_string(), vec![14..18]),
4200 ]),
4201 "If no exclusions match, all files should be returned"
4202 );
4203
4204 assert_eq!(
4205 search(
4206 &project,
4207 SearchQuery::text(
4208 search_query,
4209 false,
4210 true,
4211 false,
4212 Default::default(),
4213 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4214 None,
4215 )
4216 .unwrap(),
4217 cx
4218 )
4219 .await
4220 .unwrap(),
4221 HashMap::from_iter([
4222 ("dir/one.ts".to_string(), vec![14..18]),
4223 ("dir/two.ts".to_string(), vec![14..18]),
4224 ]),
4225 "Rust exclusion search should give only TypeScript files"
4226 );
4227
4228 assert_eq!(
4229 search(
4230 &project,
4231 SearchQuery::text(
4232 search_query,
4233 false,
4234 true,
4235 false,
4236 Default::default(),
4237 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4238 None,
4239 ).unwrap(),
4240 cx
4241 )
4242 .await
4243 .unwrap(),
4244 HashMap::from_iter([
4245 ("dir/one.rs".to_string(), vec![8..12]),
4246 ("dir/two.rs".to_string(), vec![8..12]),
4247 ]),
4248 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4249 );
4250
4251 assert!(
4252 search(
4253 &project,
4254 SearchQuery::text(
4255 search_query,
4256 false,
4257 true,
4258 false,
4259 Default::default(),
4260
4261 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4262 None,
4263
4264 ).unwrap(),
4265 cx
4266 )
4267 .await
4268 .unwrap().is_empty(),
4269 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4270 );
4271}
4272
4273#[gpui::test]
4274async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4275 init_test(cx);
4276
4277 let search_query = "file";
4278
4279 let fs = FakeFs::new(cx.executor());
4280 fs.insert_tree(
4281 "/dir",
4282 json!({
4283 "one.rs": r#"// Rust file one"#,
4284 "one.ts": r#"// TypeScript file one"#,
4285 "two.rs": r#"// Rust file two"#,
4286 "two.ts": r#"// TypeScript file two"#,
4287 }),
4288 )
4289 .await;
4290 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4291
4292 assert!(
4293 search(
4294 &project,
4295 SearchQuery::text(
4296 search_query,
4297 false,
4298 true,
4299 false,
4300 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4301 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4302 None,
4303 )
4304 .unwrap(),
4305 cx
4306 )
4307 .await
4308 .unwrap()
4309 .is_empty(),
4310 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4311 );
4312
4313 assert!(
4314 search(
4315 &project,
4316 SearchQuery::text(
4317 search_query,
4318 false,
4319 true,
4320 false,
4321 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4322 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4323 None,
4324 ).unwrap(),
4325 cx
4326 )
4327 .await
4328 .unwrap()
4329 .is_empty(),
4330 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4331 );
4332
4333 assert!(
4334 search(
4335 &project,
4336 SearchQuery::text(
4337 search_query,
4338 false,
4339 true,
4340 false,
4341 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4342 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4343 None,
4344 )
4345 .unwrap(),
4346 cx
4347 )
4348 .await
4349 .unwrap()
4350 .is_empty(),
4351 "Non-matching inclusions and exclusions should not change that."
4352 );
4353
4354 assert_eq!(
4355 search(
4356 &project,
4357 SearchQuery::text(
4358 search_query,
4359 false,
4360 true,
4361 false,
4362 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4363 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4364 None,
4365 )
4366 .unwrap(),
4367 cx
4368 )
4369 .await
4370 .unwrap(),
4371 HashMap::from_iter([
4372 ("dir/one.ts".to_string(), vec![14..18]),
4373 ("dir/two.ts".to_string(), vec![14..18]),
4374 ]),
4375 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4376 );
4377}
4378
4379#[gpui::test]
4380async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4381 init_test(cx);
4382
4383 let fs = FakeFs::new(cx.executor());
4384 fs.insert_tree(
4385 "/worktree-a",
4386 json!({
4387 "haystack.rs": r#"// NEEDLE"#,
4388 "haystack.ts": r#"// NEEDLE"#,
4389 }),
4390 )
4391 .await;
4392 fs.insert_tree(
4393 "/worktree-b",
4394 json!({
4395 "haystack.rs": r#"// NEEDLE"#,
4396 "haystack.ts": r#"// NEEDLE"#,
4397 }),
4398 )
4399 .await;
4400
4401 let project = Project::test(
4402 fs.clone(),
4403 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4404 cx,
4405 )
4406 .await;
4407
4408 assert_eq!(
4409 search(
4410 &project,
4411 SearchQuery::text(
4412 "NEEDLE",
4413 false,
4414 true,
4415 false,
4416 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4417 Default::default(),
4418 None,
4419 )
4420 .unwrap(),
4421 cx
4422 )
4423 .await
4424 .unwrap(),
4425 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4426 "should only return results from included worktree"
4427 );
4428 assert_eq!(
4429 search(
4430 &project,
4431 SearchQuery::text(
4432 "NEEDLE",
4433 false,
4434 true,
4435 false,
4436 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4437 Default::default(),
4438 None,
4439 )
4440 .unwrap(),
4441 cx
4442 )
4443 .await
4444 .unwrap(),
4445 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4446 "should only return results from included worktree"
4447 );
4448
4449 assert_eq!(
4450 search(
4451 &project,
4452 SearchQuery::text(
4453 "NEEDLE",
4454 false,
4455 true,
4456 false,
4457 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4458 Default::default(),
4459 None,
4460 )
4461 .unwrap(),
4462 cx
4463 )
4464 .await
4465 .unwrap(),
4466 HashMap::from_iter([
4467 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4468 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4469 ]),
4470 "should return results from both worktrees"
4471 );
4472}
4473
// Verifies how search treats gitignored paths: ignored files are skipped by
// default, searched when the "include ignored" flag is set, and inclusion/
// exclusion globs still apply on top of that flag.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target` and `node_modules` are ignored via .gitignore; only the root
    // package.json is tracked.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Default search (include_ignored = false) skips ignored directories.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // With include_ignored = true, matches in `target` and `node_modules`
    // are returned as well. (A fresh project is created for each case,
    // presumably to avoid state carried over from the previous search —
    // TODO confirm.)
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion globs still filter ignored files once they are
    // searchable.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4590
4591#[gpui::test]
4592async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
4593 init_test(cx);
4594
4595 let fs = FakeFs::new(cx.background_executor.clone());
4596 fs.insert_tree(
4597 "/dir",
4598 json!({
4599 ".git": {},
4600 ".gitignore": "**/target\n/node_modules\n",
4601 "aaa.txt": "key:value",
4602 "bbb": {
4603 "index.txt": "index_key:index_value"
4604 },
4605 "node_modules": {
4606 "10 eleven": "key",
4607 "1 two": "key"
4608 },
4609 }),
4610 )
4611 .await;
4612 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4613
4614 let mut search = project.update(cx, |project, cx| {
4615 project.search(
4616 SearchQuery::text(
4617 "key",
4618 false,
4619 false,
4620 true,
4621 Default::default(),
4622 Default::default(),
4623 None,
4624 )
4625 .unwrap(),
4626 cx,
4627 )
4628 });
4629
4630 fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
4631 match search_result.unwrap() {
4632 SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
4633 buffer.file().unwrap().path().to_string_lossy().to_string()
4634 }),
4635 _ => panic!("Expected buffer"),
4636 }
4637 }
4638
4639 assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
4640 assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
4641 assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
4642 assert_eq!(file_name(search.next().await, cx), "aaa.txt");
4643 assert!(search.next().await.is_none())
4644}
4645
4646#[gpui::test]
4647async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4648 init_test(cx);
4649
4650 let fs = FakeFs::new(cx.executor().clone());
4651 fs.insert_tree(
4652 "/one/two",
4653 json!({
4654 "three": {
4655 "a.txt": "",
4656 "four": {}
4657 },
4658 "c.rs": ""
4659 }),
4660 )
4661 .await;
4662
4663 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4664 project
4665 .update(cx, |project, cx| {
4666 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4667 project.create_entry((id, "b.."), true, cx)
4668 })
4669 .await
4670 .unwrap()
4671 .to_included()
4672 .unwrap();
4673
4674 // Can't create paths outside the project
4675 let result = project
4676 .update(cx, |project, cx| {
4677 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4678 project.create_entry((id, "../../boop"), true, cx)
4679 })
4680 .await;
4681 assert!(result.is_err());
4682
4683 // Can't create paths with '..'
4684 let result = project
4685 .update(cx, |project, cx| {
4686 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4687 project.create_entry((id, "four/../beep"), true, cx)
4688 })
4689 .await;
4690 assert!(result.is_err());
4691
4692 assert_eq!(
4693 fs.paths(true),
4694 vec![
4695 PathBuf::from("/"),
4696 PathBuf::from("/one"),
4697 PathBuf::from("/one/two"),
4698 PathBuf::from("/one/two/c.rs"),
4699 PathBuf::from("/one/two/three"),
4700 PathBuf::from("/one/two/three/a.txt"),
4701 PathBuf::from("/one/two/three/b.."),
4702 PathBuf::from("/one/two/three/four"),
4703 ]
4704 );
4705
4706 // And we cannot open buffers with '..'
4707 let result = project
4708 .update(cx, |project, cx| {
4709 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4710 project.open_buffer((id, "../c.rs"), cx)
4711 })
4712 .await;
4713 assert!(result.is_err())
4714}
4715
4716#[gpui::test]
4717async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4718 init_test(cx);
4719
4720 let fs = FakeFs::new(cx.executor());
4721 fs.insert_tree(
4722 "/dir",
4723 json!({
4724 "a.tsx": "a",
4725 }),
4726 )
4727 .await;
4728
4729 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4730
4731 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4732 language_registry.add(tsx_lang());
4733 let language_server_names = [
4734 "TypeScriptServer",
4735 "TailwindServer",
4736 "ESLintServer",
4737 "NoHoverCapabilitiesServer",
4738 ];
4739 let mut language_servers = [
4740 language_registry.register_fake_lsp(
4741 "tsx",
4742 FakeLspAdapter {
4743 name: language_server_names[0],
4744 capabilities: lsp::ServerCapabilities {
4745 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4746 ..lsp::ServerCapabilities::default()
4747 },
4748 ..FakeLspAdapter::default()
4749 },
4750 ),
4751 language_registry.register_fake_lsp(
4752 "tsx",
4753 FakeLspAdapter {
4754 name: language_server_names[1],
4755 capabilities: lsp::ServerCapabilities {
4756 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4757 ..lsp::ServerCapabilities::default()
4758 },
4759 ..FakeLspAdapter::default()
4760 },
4761 ),
4762 language_registry.register_fake_lsp(
4763 "tsx",
4764 FakeLspAdapter {
4765 name: language_server_names[2],
4766 capabilities: lsp::ServerCapabilities {
4767 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4768 ..lsp::ServerCapabilities::default()
4769 },
4770 ..FakeLspAdapter::default()
4771 },
4772 ),
4773 language_registry.register_fake_lsp(
4774 "tsx",
4775 FakeLspAdapter {
4776 name: language_server_names[3],
4777 capabilities: lsp::ServerCapabilities {
4778 hover_provider: None,
4779 ..lsp::ServerCapabilities::default()
4780 },
4781 ..FakeLspAdapter::default()
4782 },
4783 ),
4784 ];
4785
4786 let buffer = project
4787 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4788 .await
4789 .unwrap();
4790 cx.executor().run_until_parked();
4791
4792 let mut servers_with_hover_requests = HashMap::default();
4793 for i in 0..language_server_names.len() {
4794 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
4795 panic!(
4796 "Failed to get language server #{i} with name {}",
4797 &language_server_names[i]
4798 )
4799 });
4800 let new_server_name = new_server.server.name();
4801 assert!(
4802 !servers_with_hover_requests.contains_key(new_server_name),
4803 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4804 );
4805 let new_server_name = new_server_name.to_string();
4806 match new_server_name.as_str() {
4807 "TailwindServer" | "TypeScriptServer" => {
4808 servers_with_hover_requests.insert(
4809 new_server_name.clone(),
4810 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4811 let name = new_server_name.clone();
4812 async move {
4813 Ok(Some(lsp::Hover {
4814 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4815 format!("{name} hover"),
4816 )),
4817 range: None,
4818 }))
4819 }
4820 }),
4821 );
4822 }
4823 "ESLintServer" => {
4824 servers_with_hover_requests.insert(
4825 new_server_name,
4826 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4827 |_, _| async move { Ok(None) },
4828 ),
4829 );
4830 }
4831 "NoHoverCapabilitiesServer" => {
4832 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4833 |_, _| async move {
4834 panic!(
4835 "Should not call for hovers server with no corresponding capabilities"
4836 )
4837 },
4838 );
4839 }
4840 unexpected => panic!("Unexpected server name: {unexpected}"),
4841 }
4842 }
4843
4844 let hover_task = project.update(cx, |project, cx| {
4845 project.hover(&buffer, Point::new(0, 0), cx)
4846 });
4847 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4848 |mut hover_request| async move {
4849 hover_request
4850 .next()
4851 .await
4852 .expect("All hover requests should have been triggered")
4853 },
4854 ))
4855 .await;
4856 assert_eq!(
4857 vec!["TailwindServer hover", "TypeScriptServer hover"],
4858 hover_task
4859 .await
4860 .into_iter()
4861 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4862 .sorted()
4863 .collect::<Vec<_>>(),
4864 "Should receive hover responses from all related servers with hover capabilities"
4865 );
4866}
4867
4868#[gpui::test]
4869async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4870 init_test(cx);
4871
4872 let fs = FakeFs::new(cx.executor());
4873 fs.insert_tree(
4874 "/dir",
4875 json!({
4876 "a.ts": "a",
4877 }),
4878 )
4879 .await;
4880
4881 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4882
4883 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4884 language_registry.add(typescript_lang());
4885 let mut fake_language_servers = language_registry.register_fake_lsp(
4886 "TypeScript",
4887 FakeLspAdapter {
4888 capabilities: lsp::ServerCapabilities {
4889 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4890 ..lsp::ServerCapabilities::default()
4891 },
4892 ..FakeLspAdapter::default()
4893 },
4894 );
4895
4896 let buffer = project
4897 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4898 .await
4899 .unwrap();
4900 cx.executor().run_until_parked();
4901
4902 let fake_server = fake_language_servers
4903 .next()
4904 .await
4905 .expect("failed to get the language server");
4906
4907 let mut request_handled =
4908 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4909 Ok(Some(lsp::Hover {
4910 contents: lsp::HoverContents::Array(vec![
4911 lsp::MarkedString::String("".to_string()),
4912 lsp::MarkedString::String(" ".to_string()),
4913 lsp::MarkedString::String("\n\n\n".to_string()),
4914 ]),
4915 range: None,
4916 }))
4917 });
4918
4919 let hover_task = project.update(cx, |project, cx| {
4920 project.hover(&buffer, Point::new(0, 0), cx)
4921 });
4922 let () = request_handled
4923 .next()
4924 .await
4925 .expect("All hover requests should have been triggered");
4926 assert_eq!(
4927 Vec::<String>::new(),
4928 hover_task
4929 .await
4930 .into_iter()
4931 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4932 .sorted()
4933 .collect::<Vec<_>>(),
4934 "Empty hover parts should be ignored"
4935 );
4936}
4937
4938#[gpui::test]
4939async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4940 init_test(cx);
4941
4942 let fs = FakeFs::new(cx.executor());
4943 fs.insert_tree(
4944 "/dir",
4945 json!({
4946 "a.tsx": "a",
4947 }),
4948 )
4949 .await;
4950
4951 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4952
4953 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4954 language_registry.add(tsx_lang());
4955 let language_server_names = [
4956 "TypeScriptServer",
4957 "TailwindServer",
4958 "ESLintServer",
4959 "NoActionsCapabilitiesServer",
4960 ];
4961
4962 let mut language_server_rxs = [
4963 language_registry.register_fake_lsp(
4964 "tsx",
4965 FakeLspAdapter {
4966 name: language_server_names[0],
4967 capabilities: lsp::ServerCapabilities {
4968 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4969 ..lsp::ServerCapabilities::default()
4970 },
4971 ..FakeLspAdapter::default()
4972 },
4973 ),
4974 language_registry.register_fake_lsp(
4975 "tsx",
4976 FakeLspAdapter {
4977 name: language_server_names[1],
4978 capabilities: lsp::ServerCapabilities {
4979 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4980 ..lsp::ServerCapabilities::default()
4981 },
4982 ..FakeLspAdapter::default()
4983 },
4984 ),
4985 language_registry.register_fake_lsp(
4986 "tsx",
4987 FakeLspAdapter {
4988 name: language_server_names[2],
4989 capabilities: lsp::ServerCapabilities {
4990 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4991 ..lsp::ServerCapabilities::default()
4992 },
4993 ..FakeLspAdapter::default()
4994 },
4995 ),
4996 language_registry.register_fake_lsp(
4997 "tsx",
4998 FakeLspAdapter {
4999 name: language_server_names[3],
5000 capabilities: lsp::ServerCapabilities {
5001 code_action_provider: None,
5002 ..lsp::ServerCapabilities::default()
5003 },
5004 ..FakeLspAdapter::default()
5005 },
5006 ),
5007 ];
5008
5009 let buffer = project
5010 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5011 .await
5012 .unwrap();
5013 cx.executor().run_until_parked();
5014
5015 let mut servers_with_actions_requests = HashMap::default();
5016 for i in 0..language_server_names.len() {
5017 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5018 panic!(
5019 "Failed to get language server #{i} with name {}",
5020 &language_server_names[i]
5021 )
5022 });
5023 let new_server_name = new_server.server.name();
5024
5025 assert!(
5026 !servers_with_actions_requests.contains_key(new_server_name),
5027 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5028 );
5029 let new_server_name = new_server_name.to_string();
5030 match new_server_name.as_str() {
5031 "TailwindServer" | "TypeScriptServer" => {
5032 servers_with_actions_requests.insert(
5033 new_server_name.clone(),
5034 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5035 move |_, _| {
5036 let name = new_server_name.clone();
5037 async move {
5038 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5039 lsp::CodeAction {
5040 title: format!("{name} code action"),
5041 ..lsp::CodeAction::default()
5042 },
5043 )]))
5044 }
5045 },
5046 ),
5047 );
5048 }
5049 "ESLintServer" => {
5050 servers_with_actions_requests.insert(
5051 new_server_name,
5052 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5053 |_, _| async move { Ok(None) },
5054 ),
5055 );
5056 }
5057 "NoActionsCapabilitiesServer" => {
5058 let _never_handled = new_server
5059 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5060 panic!(
5061 "Should not call for code actions server with no corresponding capabilities"
5062 )
5063 });
5064 }
5065 unexpected => panic!("Unexpected server name: {unexpected}"),
5066 }
5067 }
5068
5069 let code_actions_task = project.update(cx, |project, cx| {
5070 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5071 });
5072
5073 // cx.run_until_parked();
5074 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5075 |mut code_actions_request| async move {
5076 code_actions_request
5077 .next()
5078 .await
5079 .expect("All code actions requests should have been triggered")
5080 },
5081 ))
5082 .await;
5083 assert_eq!(
5084 vec!["TailwindServer code action", "TypeScriptServer code action"],
5085 code_actions_task
5086 .await
5087 .unwrap()
5088 .into_iter()
5089 .map(|code_action| code_action.lsp_action.title)
5090 .sorted()
5091 .collect::<Vec<_>>(),
5092 "Should receive code actions responses from all related servers with hover capabilities"
5093 );
5094}
5095
5096#[gpui::test]
5097async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5098 init_test(cx);
5099
5100 let fs = FakeFs::new(cx.executor());
5101 fs.insert_tree(
5102 "/dir",
5103 json!({
5104 "a.rs": "let a = 1;",
5105 "b.rs": "let b = 2;",
5106 "c.rs": "let c = 2;",
5107 }),
5108 )
5109 .await;
5110
5111 let project = Project::test(
5112 fs,
5113 [
5114 "/dir/a.rs".as_ref(),
5115 "/dir/b.rs".as_ref(),
5116 "/dir/c.rs".as_ref(),
5117 ],
5118 cx,
5119 )
5120 .await;
5121
5122 // check the initial state and get the worktrees
5123 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5124 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5125 assert_eq!(worktrees.len(), 3);
5126
5127 let worktree_a = worktrees[0].read(cx);
5128 let worktree_b = worktrees[1].read(cx);
5129 let worktree_c = worktrees[2].read(cx);
5130
5131 // check they start in the right order
5132 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5133 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5134 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5135
5136 (
5137 worktrees[0].clone(),
5138 worktrees[1].clone(),
5139 worktrees[2].clone(),
5140 )
5141 });
5142
5143 // move first worktree to after the second
5144 // [a, b, c] -> [b, a, c]
5145 project
5146 .update(cx, |project, cx| {
5147 let first = worktree_a.read(cx);
5148 let second = worktree_b.read(cx);
5149 project.move_worktree(first.id(), second.id(), cx)
5150 })
5151 .expect("moving first after second");
5152
5153 // check the state after moving
5154 project.update(cx, |project, cx| {
5155 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5156 assert_eq!(worktrees.len(), 3);
5157
5158 let first = worktrees[0].read(cx);
5159 let second = worktrees[1].read(cx);
5160 let third = worktrees[2].read(cx);
5161
5162 // check they are now in the right order
5163 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5164 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5165 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5166 });
5167
5168 // move the second worktree to before the first
5169 // [b, a, c] -> [a, b, c]
5170 project
5171 .update(cx, |project, cx| {
5172 let second = worktree_a.read(cx);
5173 let first = worktree_b.read(cx);
5174 project.move_worktree(first.id(), second.id(), cx)
5175 })
5176 .expect("moving second before first");
5177
5178 // check the state after moving
5179 project.update(cx, |project, cx| {
5180 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5181 assert_eq!(worktrees.len(), 3);
5182
5183 let first = worktrees[0].read(cx);
5184 let second = worktrees[1].read(cx);
5185 let third = worktrees[2].read(cx);
5186
5187 // check they are now in the right order
5188 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5189 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5190 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5191 });
5192
5193 // move the second worktree to after the third
5194 // [a, b, c] -> [a, c, b]
5195 project
5196 .update(cx, |project, cx| {
5197 let second = worktree_b.read(cx);
5198 let third = worktree_c.read(cx);
5199 project.move_worktree(second.id(), third.id(), cx)
5200 })
5201 .expect("moving second after third");
5202
5203 // check the state after moving
5204 project.update(cx, |project, cx| {
5205 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5206 assert_eq!(worktrees.len(), 3);
5207
5208 let first = worktrees[0].read(cx);
5209 let second = worktrees[1].read(cx);
5210 let third = worktrees[2].read(cx);
5211
5212 // check they are now in the right order
5213 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5214 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5215 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5216 });
5217
5218 // move the third worktree to before the second
5219 // [a, c, b] -> [a, b, c]
5220 project
5221 .update(cx, |project, cx| {
5222 let third = worktree_c.read(cx);
5223 let second = worktree_b.read(cx);
5224 project.move_worktree(third.id(), second.id(), cx)
5225 })
5226 .expect("moving third before second");
5227
5228 // check the state after moving
5229 project.update(cx, |project, cx| {
5230 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5231 assert_eq!(worktrees.len(), 3);
5232
5233 let first = worktrees[0].read(cx);
5234 let second = worktrees[1].read(cx);
5235 let third = worktrees[2].read(cx);
5236
5237 // check they are now in the right order
5238 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5239 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5240 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5241 });
5242
5243 // move the first worktree to after the third
5244 // [a, b, c] -> [b, c, a]
5245 project
5246 .update(cx, |project, cx| {
5247 let first = worktree_a.read(cx);
5248 let third = worktree_c.read(cx);
5249 project.move_worktree(first.id(), third.id(), cx)
5250 })
5251 .expect("moving first after third");
5252
5253 // check the state after moving
5254 project.update(cx, |project, cx| {
5255 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5256 assert_eq!(worktrees.len(), 3);
5257
5258 let first = worktrees[0].read(cx);
5259 let second = worktrees[1].read(cx);
5260 let third = worktrees[2].read(cx);
5261
5262 // check they are now in the right order
5263 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5264 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5265 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5266 });
5267
5268 // move the third worktree to before the first
5269 // [b, c, a] -> [a, b, c]
5270 project
5271 .update(cx, |project, cx| {
5272 let third = worktree_a.read(cx);
5273 let first = worktree_b.read(cx);
5274 project.move_worktree(third.id(), first.id(), cx)
5275 })
5276 .expect("moving third before first");
5277
5278 // check the state after moving
5279 project.update(cx, |project, cx| {
5280 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5281 assert_eq!(worktrees.len(), 3);
5282
5283 let first = worktrees[0].read(cx);
5284 let second = worktrees[1].read(cx);
5285 let third = worktrees[2].read(cx);
5286
5287 // check they are now in the right order
5288 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5289 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5290 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5291 });
5292}
5293
5294async fn search(
5295 project: &Model<Project>,
5296 query: SearchQuery,
5297 cx: &mut gpui::TestAppContext,
5298) -> Result<HashMap<String, Vec<Range<usize>>>> {
5299 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5300 let mut results = HashMap::default();
5301 while let Some(search_result) = search_rx.next().await {
5302 match search_result {
5303 SearchResult::Buffer { buffer, ranges } => {
5304 results.entry(buffer).or_insert(ranges);
5305 }
5306 SearchResult::LimitReached => {}
5307 }
5308 }
5309 Ok(results
5310 .into_iter()
5311 .map(|(buffer, ranges)| {
5312 buffer.update(cx, |buffer, cx| {
5313 let path = buffer
5314 .file()
5315 .unwrap()
5316 .full_path(cx)
5317 .to_string_lossy()
5318 .to_string();
5319 let ranges = ranges
5320 .into_iter()
5321 .map(|range| range.to_offset(buffer))
5322 .collect::<Vec<_>>();
5323 (path, ranges)
5324 })
5325 })
5326 .collect())
5327}
5328
5329pub fn init_test(cx: &mut gpui::TestAppContext) {
5330 if std::env::var("RUST_LOG").is_ok() {
5331 env_logger::try_init().ok();
5332 }
5333
5334 cx.update(|cx| {
5335 let settings_store = SettingsStore::test(cx);
5336 cx.set_global(settings_store);
5337 release_channel::init(SemanticVersion::default(), cx);
5338 language::init(cx);
5339 Project::init_settings(cx);
5340 });
5341}
5342
5343fn json_lang() -> Arc<Language> {
5344 Arc::new(Language::new(
5345 LanguageConfig {
5346 name: "JSON".into(),
5347 matcher: LanguageMatcher {
5348 path_suffixes: vec!["json".to_string()],
5349 ..Default::default()
5350 },
5351 ..Default::default()
5352 },
5353 None,
5354 ))
5355}
5356
5357fn js_lang() -> Arc<Language> {
5358 Arc::new(Language::new(
5359 LanguageConfig {
5360 name: "JavaScript".into(),
5361 matcher: LanguageMatcher {
5362 path_suffixes: vec!["js".to_string()],
5363 ..Default::default()
5364 },
5365 ..Default::default()
5366 },
5367 None,
5368 ))
5369}
5370
5371fn rust_lang() -> Arc<Language> {
5372 Arc::new(Language::new(
5373 LanguageConfig {
5374 name: "Rust".into(),
5375 matcher: LanguageMatcher {
5376 path_suffixes: vec!["rs".to_string()],
5377 ..Default::default()
5378 },
5379 ..Default::default()
5380 },
5381 Some(tree_sitter_rust::LANGUAGE.into()),
5382 ))
5383}
5384
5385fn typescript_lang() -> Arc<Language> {
5386 Arc::new(Language::new(
5387 LanguageConfig {
5388 name: "TypeScript".into(),
5389 matcher: LanguageMatcher {
5390 path_suffixes: vec!["ts".to_string()],
5391 ..Default::default()
5392 },
5393 ..Default::default()
5394 },
5395 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5396 ))
5397}
5398
5399fn tsx_lang() -> Arc<Language> {
5400 Arc::new(Language::new(
5401 LanguageConfig {
5402 name: "tsx".into(),
5403 matcher: LanguageMatcher {
5404 path_suffixes: vec!["tsx".to_string()],
5405 ..Default::default()
5406 },
5407 ..Default::default()
5408 },
5409 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5410 ))
5411}
5412
5413fn get_all_tasks(
5414 project: &Model<Project>,
5415 worktree_id: Option<WorktreeId>,
5416 task_context: &TaskContext,
5417 cx: &mut AppContext,
5418) -> Vec<(TaskSourceKind, ResolvedTask)> {
5419 let (mut old, new) = project.update(cx, |project, cx| {
5420 project
5421 .task_store
5422 .read(cx)
5423 .task_inventory()
5424 .unwrap()
5425 .read(cx)
5426 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5427 });
5428 old.extend(new);
5429 old
5430}