1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
53#[cfg(not(windows))]
54#[gpui::test]
55async fn test_symlinks(cx: &mut gpui::TestAppContext) {
56 init_test(cx);
57 cx.executor().allow_parking();
58
59 let dir = temp_tree(json!({
60 "root": {
61 "apple": "",
62 "banana": {
63 "carrot": {
64 "date": "",
65 "endive": "",
66 }
67 },
68 "fennel": {
69 "grape": "",
70 }
71 }
72 }));
73
74 let root_link_path = dir.path().join("root_link");
75 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
76 os::unix::fs::symlink(
77 dir.path().join("root/fennel"),
78 dir.path().join("root/finnochio"),
79 )
80 .unwrap();
81
82 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
83
84 project.update(cx, |project, cx| {
85 let tree = project.worktrees(cx).next().unwrap().read(cx);
86 assert_eq!(tree.file_count(), 5);
87 assert_eq!(
88 tree.inode_for_path("fennel/grape"),
89 tree.inode_for_path("finnochio/grape")
90 );
91 });
92}
93
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Checks that `.zed/settings.json` files apply to files beside them (a
    // nested `.zed` directory overrides the worktree root), and that tasks
    // from nested `.zed/tasks.json` files, the root `.zed/tasks.json`, and
    // the global tasks file are all surfaced and ordered as expected.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // The task source corresponding to the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings for `a/a.rs` come from the root `.zed/settings.json`;
            // settings for `b/b.rs` come from the nested `b/.zed/settings.json`.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from the more deeply nested `.zed` directory are listed first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled and register an extra
    // file-based (global) task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently scheduled task now comes first, followed by the nested
    // worktree task, then the newly added global task.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
297
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: servers start
    // lazily when a matching buffer opens; open/change/save/close events are
    // routed only to servers matching the buffer's language; renames can
    // migrate a buffer between servers (resetting its document version and
    // clearing stale diagnostics); and a restart reopens all relevant
    // documents in the replacement servers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake servers for Rust and JSON, each advertising distinct
    // completion trigger characters and save-notification support.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    fake_rust_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    fake_json_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![lsp::FileSystemWatcher {
                            glob_pattern: lsp::GlobPattern::String("/the-root/*.json".to_string()),
                            kind: None,
                        }],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported only to servers that signed up for a given extension.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic to the renamed buffer so we can verify below that a
    // language change clears it.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
719
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies that file-system events are forwarded to a language server
    // according to its `workspace/didChangeWatchedFiles` registration, and
    // that registering a watcher inside a gitignored directory causes that
    // directory to be loaded into the worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory reads the watcher
    // registration triggers below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate watched-file events, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No events yet; the registration itself caused 4 additional dir reads.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
913
914#[gpui::test]
915async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
916 init_test(cx);
917
918 let fs = FakeFs::new(cx.executor());
919 fs.insert_tree(
920 "/dir",
921 json!({
922 "a.rs": "let a = 1;",
923 "b.rs": "let b = 2;"
924 }),
925 )
926 .await;
927
928 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
929
930 let buffer_a = project
931 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
932 .await
933 .unwrap();
934 let buffer_b = project
935 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
936 .await
937 .unwrap();
938
939 project.update(cx, |project, cx| {
940 project
941 .update_diagnostics(
942 LanguageServerId(0),
943 lsp::PublishDiagnosticsParams {
944 uri: Url::from_file_path("/dir/a.rs").unwrap(),
945 version: None,
946 diagnostics: vec![lsp::Diagnostic {
947 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
948 severity: Some(lsp::DiagnosticSeverity::ERROR),
949 message: "error 1".to_string(),
950 ..Default::default()
951 }],
952 },
953 &[],
954 cx,
955 )
956 .unwrap();
957 project
958 .update_diagnostics(
959 LanguageServerId(0),
960 lsp::PublishDiagnosticsParams {
961 uri: Url::from_file_path("/dir/b.rs").unwrap(),
962 version: None,
963 diagnostics: vec![lsp::Diagnostic {
964 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
965 severity: Some(DiagnosticSeverity::WARNING),
966 message: "error 2".to_string(),
967 ..Default::default()
968 }],
969 },
970 &[],
971 cx,
972 )
973 .unwrap();
974 });
975
976 buffer_a.update(cx, |buffer, _| {
977 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
978 assert_eq!(
979 chunks
980 .iter()
981 .map(|(s, d)| (s.as_str(), *d))
982 .collect::<Vec<_>>(),
983 &[
984 ("let ", None),
985 ("a", Some(DiagnosticSeverity::ERROR)),
986 (" = 1;", None),
987 ]
988 );
989 });
990 buffer_b.update(cx, |buffer, _| {
991 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
992 assert_eq!(
993 chunks
994 .iter()
995 .map(|(s, d)| (s.as_str(), *d))
996 .collect::<Vec<_>>(),
997 &[
998 ("let ", None),
999 ("b", Some(DiagnosticSeverity::WARNING)),
1000 (" = 2;", None),
1001 ]
1002 );
1003 });
1004}
1005
1006#[gpui::test]
1007async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1008 init_test(cx);
1009
1010 let fs = FakeFs::new(cx.executor());
1011 fs.insert_tree(
1012 "/root",
1013 json!({
1014 "dir": {
1015 ".git": {
1016 "HEAD": "ref: refs/heads/main",
1017 },
1018 ".gitignore": "b.rs",
1019 "a.rs": "let a = 1;",
1020 "b.rs": "let b = 2;",
1021 },
1022 "other.rs": "let b = c;"
1023 }),
1024 )
1025 .await;
1026
1027 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1028 let (worktree, _) = project
1029 .update(cx, |project, cx| {
1030 project.find_or_create_worktree("/root/dir", true, cx)
1031 })
1032 .await
1033 .unwrap();
1034 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1035
1036 let (worktree, _) = project
1037 .update(cx, |project, cx| {
1038 project.find_or_create_worktree("/root/other.rs", false, cx)
1039 })
1040 .await
1041 .unwrap();
1042 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1043
1044 let server_id = LanguageServerId(0);
1045 project.update(cx, |project, cx| {
1046 project
1047 .update_diagnostics(
1048 server_id,
1049 lsp::PublishDiagnosticsParams {
1050 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1051 version: None,
1052 diagnostics: vec![lsp::Diagnostic {
1053 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1054 severity: Some(lsp::DiagnosticSeverity::ERROR),
1055 message: "unused variable 'b'".to_string(),
1056 ..Default::default()
1057 }],
1058 },
1059 &[],
1060 cx,
1061 )
1062 .unwrap();
1063 project
1064 .update_diagnostics(
1065 server_id,
1066 lsp::PublishDiagnosticsParams {
1067 uri: Url::from_file_path("/root/other.rs").unwrap(),
1068 version: None,
1069 diagnostics: vec![lsp::Diagnostic {
1070 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1071 severity: Some(lsp::DiagnosticSeverity::ERROR),
1072 message: "unknown variable 'c'".to_string(),
1073 ..Default::default()
1074 }],
1075 },
1076 &[],
1077 cx,
1078 )
1079 .unwrap();
1080 });
1081
1082 let main_ignored_buffer = project
1083 .update(cx, |project, cx| {
1084 project.open_buffer((main_worktree_id, "b.rs"), cx)
1085 })
1086 .await
1087 .unwrap();
1088 main_ignored_buffer.update(cx, |buffer, _| {
1089 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1090 assert_eq!(
1091 chunks
1092 .iter()
1093 .map(|(s, d)| (s.as_str(), *d))
1094 .collect::<Vec<_>>(),
1095 &[
1096 ("let ", None),
1097 ("b", Some(DiagnosticSeverity::ERROR)),
1098 (" = 2;", None),
1099 ],
1100 "Gigitnored buffers should still get in-buffer diagnostics",
1101 );
1102 });
1103 let other_buffer = project
1104 .update(cx, |project, cx| {
1105 project.open_buffer((other_worktree_id, ""), cx)
1106 })
1107 .await
1108 .unwrap();
1109 other_buffer.update(cx, |buffer, _| {
1110 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1111 assert_eq!(
1112 chunks
1113 .iter()
1114 .map(|(s, d)| (s.as_str(), *d))
1115 .collect::<Vec<_>>(),
1116 &[
1117 ("let b = ", None),
1118 ("c", Some(DiagnosticSeverity::ERROR)),
1119 (";", None),
1120 ],
1121 "Buffers from hidden projects should still get in-buffer diagnostics"
1122 );
1123 });
1124
1125 project.update(cx, |project, cx| {
1126 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1127 assert_eq!(
1128 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1129 vec![(
1130 ProjectPath {
1131 worktree_id: main_worktree_id,
1132 path: Arc::from(Path::new("b.rs")),
1133 },
1134 server_id,
1135 DiagnosticSummary {
1136 error_count: 1,
1137 warning_count: 0,
1138 }
1139 )]
1140 );
1141 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1142 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1143 });
1144}
1145
// Verifies that a server's disk-based diagnostics progress is surfaced as the
// expected sequence of project events: `LanguageServerAdded`, then
// `DiskBasedDiagnosticsStarted`/`Finished` bracketing `DiagnosticsUpdated`,
// and that publishing empty diagnostics twice only emits one update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake adapter below treats as disk-based diagnostics.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    // Starting the server emits `LanguageServerAdded` with the first id (0).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name().into(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits
    // `DiskBasedDiagnosticsStarted`.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs emits `DiagnosticsUpdated` for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress emits `DiskBasedDiagnosticsFinished`.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic shows up in the buffer at the reported range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical empty publish is a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1278
// Verifies that restarting a language server while its disk-based diagnostics
// are still in flight does not leave the project stuck in a "running" state:
// the replacement server's progress is tracked under its new id, and ending
// that progress clears the running set even though the old server's progress
// never completed.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server is registered under a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name().into(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1363
// Verifies that restarting a language server clears the diagnostics it had
// published, both from the buffer contents and from the project's summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is now visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1442
1443#[gpui::test]
1444async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1445 init_test(cx);
1446
1447 let fs = FakeFs::new(cx.executor());
1448 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1449
1450 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1451 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1452
1453 language_registry.add(rust_lang());
1454 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1455
1456 let buffer = project
1457 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1458 .await
1459 .unwrap();
1460
1461 // Before restarting the server, report diagnostics with an unknown buffer version.
1462 let fake_server = fake_servers.next().await.unwrap();
1463 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1464 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1465 version: Some(10000),
1466 diagnostics: Vec::new(),
1467 });
1468 cx.executor().run_until_parked();
1469
1470 project.update(cx, |project, cx| {
1471 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1472 });
1473 let mut fake_server = fake_servers.next().await.unwrap();
1474 let notification = fake_server
1475 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1476 .await
1477 .text_document;
1478 assert_eq!(notification.version, 0);
1479}
1480
// Verifies that cancelling language-server work for a buffer sends a
// `window/workDoneProgress/cancel` notification only for progress the server
// marked as cancellable; non-cancellable progress is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One piece of non-cancellable work...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable piece of work.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token receives a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1543
1544#[gpui::test]
1545async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1546 init_test(cx);
1547
1548 let fs = FakeFs::new(cx.executor());
1549 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1550 .await;
1551
1552 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1553 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1554
1555 let mut fake_rust_servers = language_registry.register_fake_lsp(
1556 "Rust",
1557 FakeLspAdapter {
1558 name: "rust-lsp",
1559 ..Default::default()
1560 },
1561 );
1562 let mut fake_js_servers = language_registry.register_fake_lsp(
1563 "JavaScript",
1564 FakeLspAdapter {
1565 name: "js-lsp",
1566 ..Default::default()
1567 },
1568 );
1569 language_registry.add(rust_lang());
1570 language_registry.add(js_lang());
1571
1572 let _rs_buffer = project
1573 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1574 .await
1575 .unwrap();
1576 let _js_buffer = project
1577 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1578 .await
1579 .unwrap();
1580
1581 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1582 assert_eq!(
1583 fake_rust_server_1
1584 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1585 .await
1586 .text_document
1587 .uri
1588 .as_str(),
1589 "file:///dir/a.rs"
1590 );
1591
1592 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1593 assert_eq!(
1594 fake_js_server
1595 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1596 .await
1597 .text_document
1598 .uri
1599 .as_str(),
1600 "file:///dir/b.js"
1601 );
1602
1603 // Disable Rust language server, ensuring only that server gets stopped.
1604 cx.update(|cx| {
1605 SettingsStore::update_global(cx, |settings, cx| {
1606 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1607 settings.languages.insert(
1608 "Rust".into(),
1609 LanguageSettingsContent {
1610 enable_language_server: Some(false),
1611 ..Default::default()
1612 },
1613 );
1614 });
1615 })
1616 });
1617 fake_rust_server_1
1618 .receive_notification::<lsp::notification::Exit>()
1619 .await;
1620
1621 // Enable Rust and disable JavaScript language servers, ensuring that the
1622 // former gets started again and that the latter stops.
1623 cx.update(|cx| {
1624 SettingsStore::update_global(cx, |settings, cx| {
1625 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1626 settings.languages.insert(
1627 LanguageName::new("Rust"),
1628 LanguageSettingsContent {
1629 enable_language_server: Some(true),
1630 ..Default::default()
1631 },
1632 );
1633 settings.languages.insert(
1634 LanguageName::new("JavaScript"),
1635 LanguageSettingsContent {
1636 enable_language_server: Some(false),
1637 ..Default::default()
1638 },
1639 );
1640 });
1641 })
1642 });
1643 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1644 assert_eq!(
1645 fake_rust_server_2
1646 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1647 .await
1648 .text_document
1649 .uri
1650 .as_str(),
1651 "file:///dir/a.rs"
1652 );
1653 fake_js_server
1654 .receive_notification::<lsp::notification::Exit>()
1655 .await;
1656}
1657
// Verifies how published diagnostics interact with buffer edits: diagnostics
// reported against an older document version are translated through the edits
// made since that version; overlapping diagnostics highlight correctly; and
// disk-based diagnostics keep tracking unsaved changes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Ranges shift by the two newlines inserted above; group ids are
    // assigned per diagnostic in publish order.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the narrower error it contains; the
        // error still takes highlight precedence over the span they share.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Diagnostics come back in buffer order regardless of publish order, with
    // ranges reflecting the latest edits.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1937
1938#[gpui::test]
1939async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1940 init_test(cx);
1941
1942 let text = concat!(
1943 "let one = ;\n", //
1944 "let two = \n",
1945 "let three = 3;\n",
1946 );
1947
1948 let fs = FakeFs::new(cx.executor());
1949 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1950
1951 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1952 let buffer = project
1953 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1954 .await
1955 .unwrap();
1956
1957 project.update(cx, |project, cx| {
1958 project.lsp_store.update(cx, |lsp_store, cx| {
1959 lsp_store
1960 .update_buffer_diagnostics(
1961 &buffer,
1962 LanguageServerId(0),
1963 None,
1964 vec![
1965 DiagnosticEntry {
1966 range: Unclipped(PointUtf16::new(0, 10))
1967 ..Unclipped(PointUtf16::new(0, 10)),
1968 diagnostic: Diagnostic {
1969 severity: DiagnosticSeverity::ERROR,
1970 message: "syntax error 1".to_string(),
1971 ..Default::default()
1972 },
1973 },
1974 DiagnosticEntry {
1975 range: Unclipped(PointUtf16::new(1, 10))
1976 ..Unclipped(PointUtf16::new(1, 10)),
1977 diagnostic: Diagnostic {
1978 severity: DiagnosticSeverity::ERROR,
1979 message: "syntax error 2".to_string(),
1980 ..Default::default()
1981 },
1982 },
1983 ],
1984 cx,
1985 )
1986 .unwrap();
1987 })
1988 });
1989
1990 // An empty range is extended forward to include the following character.
1991 // At the end of a line, an empty range is extended backward to include
1992 // the preceding character.
1993 buffer.update(cx, |buffer, _| {
1994 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1995 assert_eq!(
1996 chunks
1997 .iter()
1998 .map(|(s, d)| (s.as_str(), *d))
1999 .collect::<Vec<_>>(),
2000 &[
2001 ("let one = ", None),
2002 (";", Some(DiagnosticSeverity::ERROR)),
2003 ("\nlet two =", None),
2004 (" ", Some(DiagnosticSeverity::ERROR)),
2005 ("\nlet three = 3;\n", None)
2006 ]
2007 );
2008 });
2009}
2010
2011#[gpui::test]
2012async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2013 init_test(cx);
2014
2015 let fs = FakeFs::new(cx.executor());
2016 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2017 .await;
2018
2019 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2020
2021 project.update(cx, |project, cx| {
2022 project
2023 .update_diagnostic_entries(
2024 LanguageServerId(0),
2025 Path::new("/dir/a.rs").to_owned(),
2026 None,
2027 vec![DiagnosticEntry {
2028 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2029 diagnostic: Diagnostic {
2030 severity: DiagnosticSeverity::ERROR,
2031 is_primary: true,
2032 message: "syntax error a1".to_string(),
2033 ..Default::default()
2034 },
2035 }],
2036 cx,
2037 )
2038 .unwrap();
2039 project
2040 .update_diagnostic_entries(
2041 LanguageServerId(1),
2042 Path::new("/dir/a.rs").to_owned(),
2043 None,
2044 vec![DiagnosticEntry {
2045 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2046 diagnostic: Diagnostic {
2047 severity: DiagnosticSeverity::ERROR,
2048 is_primary: true,
2049 message: "syntax error b1".to_string(),
2050 ..Default::default()
2051 },
2052 }],
2053 cx,
2054 )
2055 .unwrap();
2056
2057 assert_eq!(
2058 project.diagnostic_summary(false, cx),
2059 DiagnosticSummary {
2060 error_count: 2,
2061 warning_count: 0,
2062 }
2063 );
2064 });
2065}
2066
// Verifies `edits_from_lsp` when the server computed its edits against a past
// document version: the returned ranges are translated through the buffer
// edits made since that version, so applying them yields the text the server
// intended without clobbering the interim edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server will (stalely) base its edits on.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The server's edits use coordinates of the original (pre-edit) document
    // version; `edits_from_lsp` must map them into current coordinates.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the interim comments while
    // landing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2219
// Verifies that `edits_from_lsp` minimizes a replace-nearly-everything diff
// (as rust-analyzer sends for its merge-imports code action) down to the
// small set of edits that actually change the text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge diff has been reduced to two minimal edits: rewrite the
        // first import and delete the now-redundant second one.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2328
// Verifies that `edits_from_lsp` tolerates malformed input from a language
// server: edits delivered out of order, a range whose start comes after its
// end, and a range whose end line (99) is far past the end of the file. The
// result should be the same minimal, normalized edits as in the well-formed
// case above.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 is past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the edits come back normalized and minimal.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2433
2434fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2435 buffer: &Buffer,
2436 range: Range<T>,
2437) -> Vec<(String, Option<DiagnosticSeverity>)> {
2438 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2439 for chunk in buffer.snapshot().chunks(range, true) {
2440 if chunks.last().map_or(false, |prev_chunk| {
2441 prev_chunk.1 == chunk.diagnostic_severity
2442 }) {
2443 chunks.last_mut().unwrap().0.push_str(chunk.text);
2444 } else {
2445 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2446 }
2447 }
2448 chunks
2449}
2450
// Covers go-to-definition targeting a file outside the project: the target is
// opened in a new *invisible* worktree, the existing language server is reused
// (no new server is started), and the temporary worktree is released once the
// definition handle is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server replies with a definition located in a.rs,
    // which is outside the project's visible worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree; b.rs remains the visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: enumerate the project's worktrees as (abs_path, is_visible) pairs.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2544
// Covers completion items that carry no `textEdit` range: the editor must
// infer the range being replaced from the text preceding the cursor. The two
// rounds below check the inferred range for an identifier suffix ("fqn") and
// for a partial string-literal segment ("cmp").
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Round 1: completing an identifier. The item supplies `insert_text` but
    // no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the trailing "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Round 2: completing inside a string literal, cursor before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers the trailing "cmp" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2636
// Verifies that carriage returns in a completion item's `insert_text` (both
// bare "\r" and "\r\n") are normalized to "\n" before the text is applied to
// the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles come out as plain "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2697
// Exercises the full command-backed code-action flow: the server returns an
// action with `data` but no edits, resolving the action yields a command
// instead of edits, executing the command makes the server send back a
// `workspace/applyEdit`, and the resulting edit lands in the buffer as a
// single undoable transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved before they can be applied.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated applyEdit: prepend "X" to a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole action is one transaction, so a single undo reverts it.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2830
2831#[gpui::test(iterations = 10)]
2832async fn test_save_file(cx: &mut gpui::TestAppContext) {
2833 init_test(cx);
2834
2835 let fs = FakeFs::new(cx.executor());
2836 fs.insert_tree(
2837 "/dir",
2838 json!({
2839 "file1": "the old contents",
2840 }),
2841 )
2842 .await;
2843
2844 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2845 let buffer = project
2846 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2847 .await
2848 .unwrap();
2849 buffer.update(cx, |buffer, cx| {
2850 assert_eq!(buffer.text(), "the old contents");
2851 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2852 });
2853
2854 project
2855 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2856 .await
2857 .unwrap();
2858
2859 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2860 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2861}
2862
2863#[gpui::test(iterations = 30)]
2864async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2865 init_test(cx);
2866
2867 let fs = FakeFs::new(cx.executor().clone());
2868 fs.insert_tree(
2869 "/dir",
2870 json!({
2871 "file1": "the original contents",
2872 }),
2873 )
2874 .await;
2875
2876 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2877 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2878 let buffer = project
2879 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2880 .await
2881 .unwrap();
2882
2883 // Simulate buffer diffs being slow, so that they don't complete before
2884 // the next file change occurs.
2885 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2886
2887 // Change the buffer's file on disk, and then wait for the file change
2888 // to be detected by the worktree, so that the buffer starts reloading.
2889 fs.save(
2890 "/dir/file1".as_ref(),
2891 &"the first contents".into(),
2892 Default::default(),
2893 )
2894 .await
2895 .unwrap();
2896 worktree.next_event(cx).await;
2897
2898 // Change the buffer's file again. Depending on the random seed, the
2899 // previous file change may still be in progress.
2900 fs.save(
2901 "/dir/file1".as_ref(),
2902 &"the second contents".into(),
2903 Default::default(),
2904 )
2905 .await
2906 .unwrap();
2907 worktree.next_event(cx).await;
2908
2909 cx.executor().run_until_parked();
2910 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2911 buffer.read_with(cx, |buffer, _| {
2912 assert_eq!(buffer.text(), on_disk_text);
2913 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2914 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2915 });
2916}
2917
2918#[gpui::test(iterations = 30)]
2919async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2920 init_test(cx);
2921
2922 let fs = FakeFs::new(cx.executor().clone());
2923 fs.insert_tree(
2924 "/dir",
2925 json!({
2926 "file1": "the original contents",
2927 }),
2928 )
2929 .await;
2930
2931 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2932 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2933 let buffer = project
2934 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2935 .await
2936 .unwrap();
2937
2938 // Simulate buffer diffs being slow, so that they don't complete before
2939 // the next file change occurs.
2940 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2941
2942 // Change the buffer's file on disk, and then wait for the file change
2943 // to be detected by the worktree, so that the buffer starts reloading.
2944 fs.save(
2945 "/dir/file1".as_ref(),
2946 &"the first contents".into(),
2947 Default::default(),
2948 )
2949 .await
2950 .unwrap();
2951 worktree.next_event(cx).await;
2952
2953 cx.executor()
2954 .spawn(cx.executor().simulate_random_delay())
2955 .await;
2956
2957 // Perform a noop edit, causing the buffer's version to increase.
2958 buffer.update(cx, |buffer, cx| {
2959 buffer.edit([(0..0, " ")], None, cx);
2960 buffer.undo(cx);
2961 });
2962
2963 cx.executor().run_until_parked();
2964 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2965 buffer.read_with(cx, |buffer, _| {
2966 let buffer_text = buffer.text();
2967 if buffer_text == on_disk_text {
2968 assert!(
2969 !buffer.is_dirty() && !buffer.has_conflict(),
2970 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2971 );
2972 }
2973 // If the file change occurred while the buffer was processing the first
2974 // change, the buffer will be in a conflicting state.
2975 else {
2976 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2977 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2978 }
2979 });
2980}
2981
2982#[gpui::test]
2983async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2984 init_test(cx);
2985
2986 let fs = FakeFs::new(cx.executor());
2987 fs.insert_tree(
2988 "/dir",
2989 json!({
2990 "file1": "the old contents",
2991 }),
2992 )
2993 .await;
2994
2995 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2996 let buffer = project
2997 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2998 .await
2999 .unwrap();
3000 buffer.update(cx, |buffer, cx| {
3001 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3002 });
3003
3004 project
3005 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3006 .await
3007 .unwrap();
3008
3009 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3010 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3011}
3012
// Saves an untitled buffer to a new path and verifies that the file is
// written, the buffer becomes clean, its language is re-detected from the new
// extension (Plain Text -> Rust), and a subsequent open of the same path
// resolves to the very same buffer.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out dirty, conflict-free, and as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After save-as, the buffer has a file, is clean, and speaks Rust.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the newly saved path yields the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3064
// Renames and deletes files/directories on the real filesystem while buffers
// are open, then checks that (1) entry ids and buffer file paths survive the
// rescan, and (2) a remote replica of the worktree converges to the same
// state after applying the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Uses the real filesystem, so real FS watching must be allowed to block.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree streams out, to replay later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames, including the directory rename.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // ...and only the deleted file's buffer is marked deleted.
        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3212
// Renames a directory via the project API and checks that both the directory's
// and the contained file's entry ids are preserved, and that the open buffer
// stays clean across the rename.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" -> "b" through the project.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename; the buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3264
3265#[gpui::test]
3266async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3267 init_test(cx);
3268
3269 let fs = FakeFs::new(cx.executor());
3270 fs.insert_tree(
3271 "/dir",
3272 json!({
3273 "a.txt": "a-contents",
3274 "b.txt": "b-contents",
3275 }),
3276 )
3277 .await;
3278
3279 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3280
3281 // Spawn multiple tasks to open paths, repeating some paths.
3282 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3283 (
3284 p.open_local_buffer("/dir/a.txt", cx),
3285 p.open_local_buffer("/dir/b.txt", cx),
3286 p.open_local_buffer("/dir/a.txt", cx),
3287 )
3288 });
3289
3290 let buffer_a_1 = buffer_a_1.await.unwrap();
3291 let buffer_a_2 = buffer_a_2.await.unwrap();
3292 let buffer_b = buffer_b.await.unwrap();
3293 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3294 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3295
3296 // There is only one buffer per path.
3297 let buffer_a_id = buffer_a_1.entity_id();
3298 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3299
3300 // Open the same path again while it is still open.
3301 drop(buffer_a_1);
3302 let buffer_a_3 = project
3303 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3304 .await
3305 .unwrap();
3306
3307 // There's still only one buffer per path.
3308 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3309}
3310
// Verifies buffer dirty-state tracking and the events it emits:
// - an edit marks a clean buffer dirty (Edited + DirtyChanged),
// - `did_save` clears the dirty flag (Saved),
// - restoring the previously-saved text clears the dirty flag again,
// - deleting the backing file dirties a clean buffer, but does NOT emit
//   another DirtyChanged for a buffer that was already dirty.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collects every buffer event except low-level Operation events, which
    // are filtered out below so the assertions can match exact sequences.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first of the two consecutive edits produces a
    // DirtyChanged, since the buffer is already dirty for the second.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    // Let the worktree observe the deletion and notify the buffer.
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3457
// Verifies how a buffer reacts to its file changing on disk:
// - a clean buffer is reloaded by diffing old vs. new contents, so anchors
//   are preserved across the reload;
// - a dirty buffer is NOT reloaded and is instead marked as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // check that anchors survive the diff-based reload below.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the text they were attached to: rows shifted
        // down by the inserted lines, and the anchor on the deleted line "c"
        // landed at the nearest surviving position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3538
// Verifies line-ending handling: CRLF files are normalized to "\n" in the
// in-memory buffer while the detected LineEnding is remembered, updated when
// the file changes on disk, and re-applied when saving back to disk.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file's text is normalized in memory; the Windows line ending
    // is tracked separately.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3600
// Verifies that LSP diagnostics are grouped via their `related_information`:
// a primary diagnostic and the hints that reference it share a group_id, the
// primary entry has `is_primary: true`, and `diagnostic_group` returns each
// group's entries in buffer order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate a publishDiagnostics payload containing two diagnostic
    // "groups": "error 1" with one hint, and "error 2" with two hints.
    // Primaries and hints cross-reference each other via related_information.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, in buffer order. "error 2"'s group (id 0) includes its
    // hints on row 1, while "error 1"'s group (id 1) is entirely on row 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and its two hints, ordered by position.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3842
// Verifies LSP rename support end-to-end against a fake language server:
// `prepare_rename` resolves the symbol's range, and `perform_rename` applies
// a multi-file WorkspaceEdit, returning a transaction over both edited
// buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename capability (with prepare support) so the project
    // routes rename requests to the fake server.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server answers
    // with the symbol's range, which should come back as offsets 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with edits in both files
    // ("one.rs" definition plus both references in "two.rs").
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its undo entry; check
    // that both files were edited as requested.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3976
// Verifies project-wide text search: matches are found across files on disk,
// and unsaved in-memory buffer edits are reflected in subsequent searches.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit a buffer in memory (without saving) so that it now contains
    // matches; the next search must pick them up from the dirty buffer.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
4051
4052#[gpui::test]
4053async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4054 init_test(cx);
4055
4056 let search_query = "file";
4057
4058 let fs = FakeFs::new(cx.executor());
4059 fs.insert_tree(
4060 "/dir",
4061 json!({
4062 "one.rs": r#"// Rust file one"#,
4063 "one.ts": r#"// TypeScript file one"#,
4064 "two.rs": r#"// Rust file two"#,
4065 "two.ts": r#"// TypeScript file two"#,
4066 }),
4067 )
4068 .await;
4069 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4070
4071 assert!(
4072 search(
4073 &project,
4074 SearchQuery::text(
4075 search_query,
4076 false,
4077 true,
4078 false,
4079 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4080 Default::default(),
4081 None
4082 )
4083 .unwrap(),
4084 cx
4085 )
4086 .await
4087 .unwrap()
4088 .is_empty(),
4089 "If no inclusions match, no files should be returned"
4090 );
4091
4092 assert_eq!(
4093 search(
4094 &project,
4095 SearchQuery::text(
4096 search_query,
4097 false,
4098 true,
4099 false,
4100 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4101 Default::default(),
4102 None
4103 )
4104 .unwrap(),
4105 cx
4106 )
4107 .await
4108 .unwrap(),
4109 HashMap::from_iter([
4110 ("dir/one.rs".to_string(), vec![8..12]),
4111 ("dir/two.rs".to_string(), vec![8..12]),
4112 ]),
4113 "Rust only search should give only Rust files"
4114 );
4115
4116 assert_eq!(
4117 search(
4118 &project,
4119 SearchQuery::text(
4120 search_query,
4121 false,
4122 true,
4123 false,
4124
4125 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4126
4127 Default::default(),
4128 None,
4129 ).unwrap(),
4130 cx
4131 )
4132 .await
4133 .unwrap(),
4134 HashMap::from_iter([
4135 ("dir/one.ts".to_string(), vec![14..18]),
4136 ("dir/two.ts".to_string(), vec![14..18]),
4137 ]),
4138 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4139 );
4140
4141 assert_eq!(
4142 search(
4143 &project,
4144 SearchQuery::text(
4145 search_query,
4146 false,
4147 true,
4148 false,
4149
4150 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4151
4152 Default::default(),
4153 None,
4154 ).unwrap(),
4155 cx
4156 )
4157 .await
4158 .unwrap(),
4159 HashMap::from_iter([
4160 ("dir/two.ts".to_string(), vec![14..18]),
4161 ("dir/one.rs".to_string(), vec![8..12]),
4162 ("dir/one.ts".to_string(), vec![14..18]),
4163 ("dir/two.rs".to_string(), vec![8..12]),
4164 ]),
4165 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4166 );
4167}
4168
4169#[gpui::test]
4170async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4171 init_test(cx);
4172
4173 let search_query = "file";
4174
4175 let fs = FakeFs::new(cx.executor());
4176 fs.insert_tree(
4177 "/dir",
4178 json!({
4179 "one.rs": r#"// Rust file one"#,
4180 "one.ts": r#"// TypeScript file one"#,
4181 "two.rs": r#"// Rust file two"#,
4182 "two.ts": r#"// TypeScript file two"#,
4183 }),
4184 )
4185 .await;
4186 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4187
4188 assert_eq!(
4189 search(
4190 &project,
4191 SearchQuery::text(
4192 search_query,
4193 false,
4194 true,
4195 false,
4196 Default::default(),
4197 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4198 None,
4199 )
4200 .unwrap(),
4201 cx
4202 )
4203 .await
4204 .unwrap(),
4205 HashMap::from_iter([
4206 ("dir/one.rs".to_string(), vec![8..12]),
4207 ("dir/one.ts".to_string(), vec![14..18]),
4208 ("dir/two.rs".to_string(), vec![8..12]),
4209 ("dir/two.ts".to_string(), vec![14..18]),
4210 ]),
4211 "If no exclusions match, all files should be returned"
4212 );
4213
4214 assert_eq!(
4215 search(
4216 &project,
4217 SearchQuery::text(
4218 search_query,
4219 false,
4220 true,
4221 false,
4222 Default::default(),
4223 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4224 None,
4225 )
4226 .unwrap(),
4227 cx
4228 )
4229 .await
4230 .unwrap(),
4231 HashMap::from_iter([
4232 ("dir/one.ts".to_string(), vec![14..18]),
4233 ("dir/two.ts".to_string(), vec![14..18]),
4234 ]),
4235 "Rust exclusion search should give only TypeScript files"
4236 );
4237
4238 assert_eq!(
4239 search(
4240 &project,
4241 SearchQuery::text(
4242 search_query,
4243 false,
4244 true,
4245 false,
4246 Default::default(),
4247 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4248 None,
4249 ).unwrap(),
4250 cx
4251 )
4252 .await
4253 .unwrap(),
4254 HashMap::from_iter([
4255 ("dir/one.rs".to_string(), vec![8..12]),
4256 ("dir/two.rs".to_string(), vec![8..12]),
4257 ]),
4258 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4259 );
4260
4261 assert!(
4262 search(
4263 &project,
4264 SearchQuery::text(
4265 search_query,
4266 false,
4267 true,
4268 false,
4269 Default::default(),
4270
4271 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4272 None,
4273
4274 ).unwrap(),
4275 cx
4276 )
4277 .await
4278 .unwrap().is_empty(),
4279 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4280 );
4281}
4282
4283#[gpui::test]
4284async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4285 init_test(cx);
4286
4287 let search_query = "file";
4288
4289 let fs = FakeFs::new(cx.executor());
4290 fs.insert_tree(
4291 "/dir",
4292 json!({
4293 "one.rs": r#"// Rust file one"#,
4294 "one.ts": r#"// TypeScript file one"#,
4295 "two.rs": r#"// Rust file two"#,
4296 "two.ts": r#"// TypeScript file two"#,
4297 }),
4298 )
4299 .await;
4300 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4301
4302 assert!(
4303 search(
4304 &project,
4305 SearchQuery::text(
4306 search_query,
4307 false,
4308 true,
4309 false,
4310 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4311 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4312 None,
4313 )
4314 .unwrap(),
4315 cx
4316 )
4317 .await
4318 .unwrap()
4319 .is_empty(),
4320 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4321 );
4322
4323 assert!(
4324 search(
4325 &project,
4326 SearchQuery::text(
4327 search_query,
4328 false,
4329 true,
4330 false,
4331 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4332 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4333 None,
4334 ).unwrap(),
4335 cx
4336 )
4337 .await
4338 .unwrap()
4339 .is_empty(),
4340 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4341 );
4342
4343 assert!(
4344 search(
4345 &project,
4346 SearchQuery::text(
4347 search_query,
4348 false,
4349 true,
4350 false,
4351 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4352 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4353 None,
4354 )
4355 .unwrap(),
4356 cx
4357 )
4358 .await
4359 .unwrap()
4360 .is_empty(),
4361 "Non-matching inclusions and exclusions should not change that."
4362 );
4363
4364 assert_eq!(
4365 search(
4366 &project,
4367 SearchQuery::text(
4368 search_query,
4369 false,
4370 true,
4371 false,
4372 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4373 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4374 None,
4375 )
4376 .unwrap(),
4377 cx
4378 )
4379 .await
4380 .unwrap(),
4381 HashMap::from_iter([
4382 ("dir/one.ts".to_string(), vec![14..18]),
4383 ("dir/two.ts".to_string(), vec![14..18]),
4384 ]),
4385 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4386 );
4387}
4388
// Verifies inclusion filters across multiple worktrees: a glob prefixed with
// a worktree name restricts results to that worktree, while an unprefixed
// glob matches across all worktrees.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4483
// Verifies the `include_ignored` search flag: gitignored directories are
// skipped by default, searched when the flag is set, and inclusion/exclusion
// filters still apply on top of ignored-file results.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default search (include_ignored = false): ignored dirs are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is used for each case — presumably to avoid state
    // carried over from the previous search; confirm if simplifying.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions/exclusions still filter results inside ignored directories.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4600
// Pins the ordering of streamed search results. The asserted order shows
// that sibling paths are compared with numeric awareness ("1 two" sorts
// before "10 eleven"); the exact ranking rule for the remaining entries is
// defined by the search implementation — this test is its contract.
#[gpui::test]
async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "aaa.txt": "key:value",
            "bbb": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "10 eleven": "key",
                "1 two": "key"
            },
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // include_ignored = true, so node_modules entries participate too.
    let mut search = project.update(cx, |project, cx| {
        project.search(
            SearchQuery::text(
                "key",
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx,
        )
    });

    // Extracts the matched buffer's worktree-relative path from one result.
    fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
        match search_result.unwrap() {
            SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
                buffer.file().unwrap().path().to_string_lossy().to_string()
            }),
            _ => panic!("Expected buffer"),
        }
    }

    assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
    assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
    assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
    assert_eq!(file_name(search.next().await, cx), "aaa.txt");
    assert!(search.next().await.is_none())
}
4655
4656#[gpui::test]
4657async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4658 init_test(cx);
4659
4660 let fs = FakeFs::new(cx.executor().clone());
4661 fs.insert_tree(
4662 "/one/two",
4663 json!({
4664 "three": {
4665 "a.txt": "",
4666 "four": {}
4667 },
4668 "c.rs": ""
4669 }),
4670 )
4671 .await;
4672
4673 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4674 project
4675 .update(cx, |project, cx| {
4676 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4677 project.create_entry((id, "b.."), true, cx)
4678 })
4679 .await
4680 .unwrap()
4681 .to_included()
4682 .unwrap();
4683
4684 // Can't create paths outside the project
4685 let result = project
4686 .update(cx, |project, cx| {
4687 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4688 project.create_entry((id, "../../boop"), true, cx)
4689 })
4690 .await;
4691 assert!(result.is_err());
4692
4693 // Can't create paths with '..'
4694 let result = project
4695 .update(cx, |project, cx| {
4696 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4697 project.create_entry((id, "four/../beep"), true, cx)
4698 })
4699 .await;
4700 assert!(result.is_err());
4701
4702 assert_eq!(
4703 fs.paths(true),
4704 vec![
4705 PathBuf::from("/"),
4706 PathBuf::from("/one"),
4707 PathBuf::from("/one/two"),
4708 PathBuf::from("/one/two/c.rs"),
4709 PathBuf::from("/one/two/three"),
4710 PathBuf::from("/one/two/three/a.txt"),
4711 PathBuf::from("/one/two/three/b.."),
4712 PathBuf::from("/one/two/three/four"),
4713 ]
4714 );
4715
4716 // And we cannot open buffers with '..'
4717 let result = project
4718 .update(cx, |project, cx| {
4719 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4720 project.open_buffer((id, "../c.rs"), cx)
4721 })
4722 .await;
4723 assert!(result.is_err())
4724}
4725
4726#[gpui::test]
4727async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4728 init_test(cx);
4729
4730 let fs = FakeFs::new(cx.executor());
4731 fs.insert_tree(
4732 "/dir",
4733 json!({
4734 "a.tsx": "a",
4735 }),
4736 )
4737 .await;
4738
4739 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4740
4741 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4742 language_registry.add(tsx_lang());
4743 let language_server_names = [
4744 "TypeScriptServer",
4745 "TailwindServer",
4746 "ESLintServer",
4747 "NoHoverCapabilitiesServer",
4748 ];
4749 let mut language_servers = [
4750 language_registry.register_fake_lsp(
4751 "tsx",
4752 FakeLspAdapter {
4753 name: language_server_names[0],
4754 capabilities: lsp::ServerCapabilities {
4755 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4756 ..lsp::ServerCapabilities::default()
4757 },
4758 ..FakeLspAdapter::default()
4759 },
4760 ),
4761 language_registry.register_fake_lsp(
4762 "tsx",
4763 FakeLspAdapter {
4764 name: language_server_names[1],
4765 capabilities: lsp::ServerCapabilities {
4766 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4767 ..lsp::ServerCapabilities::default()
4768 },
4769 ..FakeLspAdapter::default()
4770 },
4771 ),
4772 language_registry.register_fake_lsp(
4773 "tsx",
4774 FakeLspAdapter {
4775 name: language_server_names[2],
4776 capabilities: lsp::ServerCapabilities {
4777 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4778 ..lsp::ServerCapabilities::default()
4779 },
4780 ..FakeLspAdapter::default()
4781 },
4782 ),
4783 language_registry.register_fake_lsp(
4784 "tsx",
4785 FakeLspAdapter {
4786 name: language_server_names[3],
4787 capabilities: lsp::ServerCapabilities {
4788 hover_provider: None,
4789 ..lsp::ServerCapabilities::default()
4790 },
4791 ..FakeLspAdapter::default()
4792 },
4793 ),
4794 ];
4795
4796 let buffer = project
4797 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4798 .await
4799 .unwrap();
4800 cx.executor().run_until_parked();
4801
4802 let mut servers_with_hover_requests = HashMap::default();
4803 for i in 0..language_server_names.len() {
4804 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
4805 panic!(
4806 "Failed to get language server #{i} with name {}",
4807 &language_server_names[i]
4808 )
4809 });
4810 let new_server_name = new_server.server.name();
4811 assert!(
4812 !servers_with_hover_requests.contains_key(new_server_name),
4813 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4814 );
4815 let new_server_name = new_server_name.to_string();
4816 match new_server_name.as_str() {
4817 "TailwindServer" | "TypeScriptServer" => {
4818 servers_with_hover_requests.insert(
4819 new_server_name.clone(),
4820 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4821 let name = new_server_name.clone();
4822 async move {
4823 Ok(Some(lsp::Hover {
4824 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4825 format!("{name} hover"),
4826 )),
4827 range: None,
4828 }))
4829 }
4830 }),
4831 );
4832 }
4833 "ESLintServer" => {
4834 servers_with_hover_requests.insert(
4835 new_server_name,
4836 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4837 |_, _| async move { Ok(None) },
4838 ),
4839 );
4840 }
4841 "NoHoverCapabilitiesServer" => {
4842 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4843 |_, _| async move {
4844 panic!(
4845 "Should not call for hovers server with no corresponding capabilities"
4846 )
4847 },
4848 );
4849 }
4850 unexpected => panic!("Unexpected server name: {unexpected}"),
4851 }
4852 }
4853
4854 let hover_task = project.update(cx, |project, cx| {
4855 project.hover(&buffer, Point::new(0, 0), cx)
4856 });
4857 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4858 |mut hover_request| async move {
4859 hover_request
4860 .next()
4861 .await
4862 .expect("All hover requests should have been triggered")
4863 },
4864 ))
4865 .await;
4866 assert_eq!(
4867 vec!["TailwindServer hover", "TypeScriptServer hover"],
4868 hover_task
4869 .await
4870 .into_iter()
4871 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4872 .sorted()
4873 .collect::<Vec<_>>(),
4874 "Should receive hover responses from all related servers with hover capabilities"
4875 );
4876}
4877
4878#[gpui::test]
4879async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4880 init_test(cx);
4881
4882 let fs = FakeFs::new(cx.executor());
4883 fs.insert_tree(
4884 "/dir",
4885 json!({
4886 "a.ts": "a",
4887 }),
4888 )
4889 .await;
4890
4891 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4892
4893 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4894 language_registry.add(typescript_lang());
4895 let mut fake_language_servers = language_registry.register_fake_lsp(
4896 "TypeScript",
4897 FakeLspAdapter {
4898 capabilities: lsp::ServerCapabilities {
4899 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4900 ..lsp::ServerCapabilities::default()
4901 },
4902 ..FakeLspAdapter::default()
4903 },
4904 );
4905
4906 let buffer = project
4907 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4908 .await
4909 .unwrap();
4910 cx.executor().run_until_parked();
4911
4912 let fake_server = fake_language_servers
4913 .next()
4914 .await
4915 .expect("failed to get the language server");
4916
4917 let mut request_handled =
4918 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4919 Ok(Some(lsp::Hover {
4920 contents: lsp::HoverContents::Array(vec![
4921 lsp::MarkedString::String("".to_string()),
4922 lsp::MarkedString::String(" ".to_string()),
4923 lsp::MarkedString::String("\n\n\n".to_string()),
4924 ]),
4925 range: None,
4926 }))
4927 });
4928
4929 let hover_task = project.update(cx, |project, cx| {
4930 project.hover(&buffer, Point::new(0, 0), cx)
4931 });
4932 let () = request_handled
4933 .next()
4934 .await
4935 .expect("All hover requests should have been triggered");
4936 assert_eq!(
4937 Vec::<String>::new(),
4938 hover_task
4939 .await
4940 .into_iter()
4941 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4942 .sorted()
4943 .collect::<Vec<_>>(),
4944 "Empty hover parts should be ignored"
4945 );
4946}
4947
4948#[gpui::test]
4949async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4950 init_test(cx);
4951
4952 let fs = FakeFs::new(cx.executor());
4953 fs.insert_tree(
4954 "/dir",
4955 json!({
4956 "a.tsx": "a",
4957 }),
4958 )
4959 .await;
4960
4961 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4962
4963 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4964 language_registry.add(tsx_lang());
4965 let language_server_names = [
4966 "TypeScriptServer",
4967 "TailwindServer",
4968 "ESLintServer",
4969 "NoActionsCapabilitiesServer",
4970 ];
4971
4972 let mut language_server_rxs = [
4973 language_registry.register_fake_lsp(
4974 "tsx",
4975 FakeLspAdapter {
4976 name: language_server_names[0],
4977 capabilities: lsp::ServerCapabilities {
4978 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4979 ..lsp::ServerCapabilities::default()
4980 },
4981 ..FakeLspAdapter::default()
4982 },
4983 ),
4984 language_registry.register_fake_lsp(
4985 "tsx",
4986 FakeLspAdapter {
4987 name: language_server_names[1],
4988 capabilities: lsp::ServerCapabilities {
4989 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4990 ..lsp::ServerCapabilities::default()
4991 },
4992 ..FakeLspAdapter::default()
4993 },
4994 ),
4995 language_registry.register_fake_lsp(
4996 "tsx",
4997 FakeLspAdapter {
4998 name: language_server_names[2],
4999 capabilities: lsp::ServerCapabilities {
5000 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5001 ..lsp::ServerCapabilities::default()
5002 },
5003 ..FakeLspAdapter::default()
5004 },
5005 ),
5006 language_registry.register_fake_lsp(
5007 "tsx",
5008 FakeLspAdapter {
5009 name: language_server_names[3],
5010 capabilities: lsp::ServerCapabilities {
5011 code_action_provider: None,
5012 ..lsp::ServerCapabilities::default()
5013 },
5014 ..FakeLspAdapter::default()
5015 },
5016 ),
5017 ];
5018
5019 let buffer = project
5020 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5021 .await
5022 .unwrap();
5023 cx.executor().run_until_parked();
5024
5025 let mut servers_with_actions_requests = HashMap::default();
5026 for i in 0..language_server_names.len() {
5027 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5028 panic!(
5029 "Failed to get language server #{i} with name {}",
5030 &language_server_names[i]
5031 )
5032 });
5033 let new_server_name = new_server.server.name();
5034
5035 assert!(
5036 !servers_with_actions_requests.contains_key(new_server_name),
5037 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5038 );
5039 let new_server_name = new_server_name.to_string();
5040 match new_server_name.as_str() {
5041 "TailwindServer" | "TypeScriptServer" => {
5042 servers_with_actions_requests.insert(
5043 new_server_name.clone(),
5044 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5045 move |_, _| {
5046 let name = new_server_name.clone();
5047 async move {
5048 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5049 lsp::CodeAction {
5050 title: format!("{name} code action"),
5051 ..lsp::CodeAction::default()
5052 },
5053 )]))
5054 }
5055 },
5056 ),
5057 );
5058 }
5059 "ESLintServer" => {
5060 servers_with_actions_requests.insert(
5061 new_server_name,
5062 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5063 |_, _| async move { Ok(None) },
5064 ),
5065 );
5066 }
5067 "NoActionsCapabilitiesServer" => {
5068 let _never_handled = new_server
5069 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5070 panic!(
5071 "Should not call for code actions server with no corresponding capabilities"
5072 )
5073 });
5074 }
5075 unexpected => panic!("Unexpected server name: {unexpected}"),
5076 }
5077 }
5078
5079 let code_actions_task = project.update(cx, |project, cx| {
5080 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5081 });
5082
5083 // cx.run_until_parked();
5084 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5085 |mut code_actions_request| async move {
5086 code_actions_request
5087 .next()
5088 .await
5089 .expect("All code actions requests should have been triggered")
5090 },
5091 ))
5092 .await;
5093 assert_eq!(
5094 vec!["TailwindServer code action", "TypeScriptServer code action"],
5095 code_actions_task
5096 .await
5097 .unwrap()
5098 .into_iter()
5099 .map(|code_action| code_action.lsp_action.title)
5100 .sorted()
5101 .collect::<Vec<_>>(),
5102 "Should receive code actions responses from all related servers with hover capabilities"
5103 );
5104}
5105
5106#[gpui::test]
5107async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5108 init_test(cx);
5109
5110 let fs = FakeFs::new(cx.executor());
5111 fs.insert_tree(
5112 "/dir",
5113 json!({
5114 "a.rs": "let a = 1;",
5115 "b.rs": "let b = 2;",
5116 "c.rs": "let c = 2;",
5117 }),
5118 )
5119 .await;
5120
5121 let project = Project::test(
5122 fs,
5123 [
5124 "/dir/a.rs".as_ref(),
5125 "/dir/b.rs".as_ref(),
5126 "/dir/c.rs".as_ref(),
5127 ],
5128 cx,
5129 )
5130 .await;
5131
5132 // check the initial state and get the worktrees
5133 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5134 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5135 assert_eq!(worktrees.len(), 3);
5136
5137 let worktree_a = worktrees[0].read(cx);
5138 let worktree_b = worktrees[1].read(cx);
5139 let worktree_c = worktrees[2].read(cx);
5140
5141 // check they start in the right order
5142 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5143 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5144 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5145
5146 (
5147 worktrees[0].clone(),
5148 worktrees[1].clone(),
5149 worktrees[2].clone(),
5150 )
5151 });
5152
5153 // move first worktree to after the second
5154 // [a, b, c] -> [b, a, c]
5155 project
5156 .update(cx, |project, cx| {
5157 let first = worktree_a.read(cx);
5158 let second = worktree_b.read(cx);
5159 project.move_worktree(first.id(), second.id(), cx)
5160 })
5161 .expect("moving first after second");
5162
5163 // check the state after moving
5164 project.update(cx, |project, cx| {
5165 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5166 assert_eq!(worktrees.len(), 3);
5167
5168 let first = worktrees[0].read(cx);
5169 let second = worktrees[1].read(cx);
5170 let third = worktrees[2].read(cx);
5171
5172 // check they are now in the right order
5173 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5174 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5175 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5176 });
5177
5178 // move the second worktree to before the first
5179 // [b, a, c] -> [a, b, c]
5180 project
5181 .update(cx, |project, cx| {
5182 let second = worktree_a.read(cx);
5183 let first = worktree_b.read(cx);
5184 project.move_worktree(first.id(), second.id(), cx)
5185 })
5186 .expect("moving second before first");
5187
5188 // check the state after moving
5189 project.update(cx, |project, cx| {
5190 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5191 assert_eq!(worktrees.len(), 3);
5192
5193 let first = worktrees[0].read(cx);
5194 let second = worktrees[1].read(cx);
5195 let third = worktrees[2].read(cx);
5196
5197 // check they are now in the right order
5198 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5199 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5200 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5201 });
5202
5203 // move the second worktree to after the third
5204 // [a, b, c] -> [a, c, b]
5205 project
5206 .update(cx, |project, cx| {
5207 let second = worktree_b.read(cx);
5208 let third = worktree_c.read(cx);
5209 project.move_worktree(second.id(), third.id(), cx)
5210 })
5211 .expect("moving second after third");
5212
5213 // check the state after moving
5214 project.update(cx, |project, cx| {
5215 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5216 assert_eq!(worktrees.len(), 3);
5217
5218 let first = worktrees[0].read(cx);
5219 let second = worktrees[1].read(cx);
5220 let third = worktrees[2].read(cx);
5221
5222 // check they are now in the right order
5223 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5224 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5225 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5226 });
5227
5228 // move the third worktree to before the second
5229 // [a, c, b] -> [a, b, c]
5230 project
5231 .update(cx, |project, cx| {
5232 let third = worktree_c.read(cx);
5233 let second = worktree_b.read(cx);
5234 project.move_worktree(third.id(), second.id(), cx)
5235 })
5236 .expect("moving third before second");
5237
5238 // check the state after moving
5239 project.update(cx, |project, cx| {
5240 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5241 assert_eq!(worktrees.len(), 3);
5242
5243 let first = worktrees[0].read(cx);
5244 let second = worktrees[1].read(cx);
5245 let third = worktrees[2].read(cx);
5246
5247 // check they are now in the right order
5248 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5249 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5250 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5251 });
5252
5253 // move the first worktree to after the third
5254 // [a, b, c] -> [b, c, a]
5255 project
5256 .update(cx, |project, cx| {
5257 let first = worktree_a.read(cx);
5258 let third = worktree_c.read(cx);
5259 project.move_worktree(first.id(), third.id(), cx)
5260 })
5261 .expect("moving first after third");
5262
5263 // check the state after moving
5264 project.update(cx, |project, cx| {
5265 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5266 assert_eq!(worktrees.len(), 3);
5267
5268 let first = worktrees[0].read(cx);
5269 let second = worktrees[1].read(cx);
5270 let third = worktrees[2].read(cx);
5271
5272 // check they are now in the right order
5273 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5274 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5275 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5276 });
5277
5278 // move the third worktree to before the first
5279 // [b, c, a] -> [a, b, c]
5280 project
5281 .update(cx, |project, cx| {
5282 let third = worktree_a.read(cx);
5283 let first = worktree_b.read(cx);
5284 project.move_worktree(third.id(), first.id(), cx)
5285 })
5286 .expect("moving third before first");
5287
5288 // check the state after moving
5289 project.update(cx, |project, cx| {
5290 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5291 assert_eq!(worktrees.len(), 3);
5292
5293 let first = worktrees[0].read(cx);
5294 let second = worktrees[1].read(cx);
5295 let third = worktrees[2].read(cx);
5296
5297 // check they are now in the right order
5298 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5299 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5300 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5301 });
5302}
5303
5304async fn search(
5305 project: &Model<Project>,
5306 query: SearchQuery,
5307 cx: &mut gpui::TestAppContext,
5308) -> Result<HashMap<String, Vec<Range<usize>>>> {
5309 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5310 let mut results = HashMap::default();
5311 while let Some(search_result) = search_rx.next().await {
5312 match search_result {
5313 SearchResult::Buffer { buffer, ranges } => {
5314 results.entry(buffer).or_insert(ranges);
5315 }
5316 SearchResult::LimitReached => {}
5317 }
5318 }
5319 Ok(results
5320 .into_iter()
5321 .map(|(buffer, ranges)| {
5322 buffer.update(cx, |buffer, cx| {
5323 let path = buffer
5324 .file()
5325 .unwrap()
5326 .full_path(cx)
5327 .to_string_lossy()
5328 .to_string();
5329 let ranges = ranges
5330 .into_iter()
5331 .map(|range| range.to_offset(buffer))
5332 .collect::<Vec<_>>();
5333 (path, ranges)
5334 })
5335 })
5336 .collect())
5337}
5338
/// Shared setup for every test in this module: installs a test settings
/// store and initializes the globals that `Project` depends on.
/// NOTE(review): the init order below looks load-bearing (settings first) —
/// keep it as-is.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    // Opt into logging only when RUST_LOG is set, so test output stays quiet
    // by default. `try_init` tolerates repeated calls across tests.
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
5352
5353fn json_lang() -> Arc<Language> {
5354 Arc::new(Language::new(
5355 LanguageConfig {
5356 name: "JSON".into(),
5357 matcher: LanguageMatcher {
5358 path_suffixes: vec!["json".to_string()],
5359 ..Default::default()
5360 },
5361 ..Default::default()
5362 },
5363 None,
5364 ))
5365}
5366
5367fn js_lang() -> Arc<Language> {
5368 Arc::new(Language::new(
5369 LanguageConfig {
5370 name: "JavaScript".into(),
5371 matcher: LanguageMatcher {
5372 path_suffixes: vec!["js".to_string()],
5373 ..Default::default()
5374 },
5375 ..Default::default()
5376 },
5377 None,
5378 ))
5379}
5380
5381fn rust_lang() -> Arc<Language> {
5382 Arc::new(Language::new(
5383 LanguageConfig {
5384 name: "Rust".into(),
5385 matcher: LanguageMatcher {
5386 path_suffixes: vec!["rs".to_string()],
5387 ..Default::default()
5388 },
5389 ..Default::default()
5390 },
5391 Some(tree_sitter_rust::LANGUAGE.into()),
5392 ))
5393}
5394
5395fn typescript_lang() -> Arc<Language> {
5396 Arc::new(Language::new(
5397 LanguageConfig {
5398 name: "TypeScript".into(),
5399 matcher: LanguageMatcher {
5400 path_suffixes: vec!["ts".to_string()],
5401 ..Default::default()
5402 },
5403 ..Default::default()
5404 },
5405 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5406 ))
5407}
5408
5409fn tsx_lang() -> Arc<Language> {
5410 Arc::new(Language::new(
5411 LanguageConfig {
5412 name: "tsx".into(),
5413 matcher: LanguageMatcher {
5414 path_suffixes: vec!["tsx".to_string()],
5415 ..Default::default()
5416 },
5417 ..Default::default()
5418 },
5419 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5420 ))
5421}
5422
5423fn get_all_tasks(
5424 project: &Model<Project>,
5425 worktree_id: Option<WorktreeId>,
5426 task_context: &TaskContext,
5427 cx: &mut AppContext,
5428) -> Vec<(TaskSourceKind, ResolvedTask)> {
5429 let (mut old, new) = project.update(cx, |project, cx| {
5430 project
5431 .task_store
5432 .read(cx)
5433 .task_inventory()
5434 .unwrap()
5435 .read(cx)
5436 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5437 });
5438 old.extend(new);
5439 old
5440}