1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies symlink handling on a real filesystem (hence RealFs and
    // allow_parking): a project opened through a symlinked root is scanned
    // like a regular directory, and a directory symlink inside the tree
    // resolves to the same inodes as its target.
    init_test(cx);
    cx.executor().allow_parking();

    // Materialize a real temp directory tree on disk.
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // A symlink to the root itself, plus a directory symlink inside the tree
    // ("finnochio" -> "fennel"). Unix-only, hence the cfg above.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // All files are visible, including those reached through the
        // symlinked directory.
        assert_eq!(tree.file_count(), 5);
        // The symlinked path and its target resolve to the same inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
93
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Tests directory-scoped `.zed` configuration: the root `.zed` directory
    // supplies settings and tasks for the whole worktree, while `b/.zed`
    // overrides settings (tab_size) and contributes its own tasks for files
    // under `b/`. Also exercises replacing a static task source at runtime
    // via `remove_local_static_source` + `add_source`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the worktree finish scanning and the `.zed` files get loaded.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies the task source backed by the root-level tasks.json.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Resolve effective language settings for a file in `a/`
            // (governed by the root `.zed`) and one in `b/` (governed by
            // the nested `b/.zed` override).
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            // Root settings apply to `a/`; the nested override applies to `b/`.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks.json files contribute a "cargo check" task; the root one
    // passes `--all`, the `b/` one does not.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled in the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Build a replacement task definition (extra arg and an env var) that
    // will be pushed through a new channel-backed static source.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                // Swap out the file-backed source for a channel-backed one
                // under the same source kind.
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The root task now reflects the replacement definition (extra arg +
    // env var); the `b/` task is unchanged.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
298
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end test of language-server lifecycle management: server
    // startup when the first matching buffer opens, didOpen/didChange/
    // didSave/didClose routing by language, capability-based buffer
    // configuration (completion triggers), moving a buffer between servers
    // when a rename changes its extension, and server restarts.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers, each advertising distinct
    // completion trigger characters and opting into save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    // Register file watchers so later rename/save events are observable.
    fake_rust_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    fake_json_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![lsp::FileSystemWatcher {
                            glob_pattern: lsp::GlobPattern::String("/the-root/*.json".to_string()),
                            kind: None,
                        }],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust buffer's change reaches the rust server; the toml edit
    // produces no DidChange notification at all.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported only to servers that signed up for a given extension.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // Same-language rename: the rust server sees close(old) + open(new).
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared after the
    // language-changing rename below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two didOpen notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
720
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles`: the worktree initially
    // skips gitignored directories, but once a language server registers a
    // watcher inside an ignored path, that path is loaded recursively and
    // FS mutations matching the registered globs are forwarded to the
    // server as FileEvents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                // `target` is gitignored: listed, but its contents unscanned.
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory reads the watcher
    // registration triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers; the third targets a gitignored subtree
    // (`target/y`), which forces it to be loaded.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate incoming FileEvents (sorted by URI for stable assertions).
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no events, but scanning `target/y`
    // requires additional directory reads.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                // Only `target/y` is fully expanded; `x` and `z` stay shallow.
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
914
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that when a project consists of two single-file worktrees,
    // published LSP diagnostics are routed to the correct buffer and are
    // reflected in each buffer's highlighted chunks.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file, with different severities, from the
    // same (fake) language server.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        // Covers the identifier `a` (columns 4..5).
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        // Covers the identifier `b` (columns 4..5).
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer reflects only its own diagnostic, with the right severity.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1006
1007#[gpui::test]
1008async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1009 init_test(cx);
1010
1011 let fs = FakeFs::new(cx.executor());
1012 fs.insert_tree(
1013 "/root",
1014 json!({
1015 "dir": {
1016 ".git": {
1017 "HEAD": "ref: refs/heads/main",
1018 },
1019 ".gitignore": "b.rs",
1020 "a.rs": "let a = 1;",
1021 "b.rs": "let b = 2;",
1022 },
1023 "other.rs": "let b = c;"
1024 }),
1025 )
1026 .await;
1027
1028 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1029 let (worktree, _) = project
1030 .update(cx, |project, cx| {
1031 project.find_or_create_worktree("/root/dir", true, cx)
1032 })
1033 .await
1034 .unwrap();
1035 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1036
1037 let (worktree, _) = project
1038 .update(cx, |project, cx| {
1039 project.find_or_create_worktree("/root/other.rs", false, cx)
1040 })
1041 .await
1042 .unwrap();
1043 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1044
1045 let server_id = LanguageServerId(0);
1046 project.update(cx, |project, cx| {
1047 project
1048 .update_diagnostics(
1049 server_id,
1050 lsp::PublishDiagnosticsParams {
1051 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1052 version: None,
1053 diagnostics: vec![lsp::Diagnostic {
1054 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1055 severity: Some(lsp::DiagnosticSeverity::ERROR),
1056 message: "unused variable 'b'".to_string(),
1057 ..Default::default()
1058 }],
1059 },
1060 &[],
1061 cx,
1062 )
1063 .unwrap();
1064 project
1065 .update_diagnostics(
1066 server_id,
1067 lsp::PublishDiagnosticsParams {
1068 uri: Url::from_file_path("/root/other.rs").unwrap(),
1069 version: None,
1070 diagnostics: vec![lsp::Diagnostic {
1071 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1072 severity: Some(lsp::DiagnosticSeverity::ERROR),
1073 message: "unknown variable 'c'".to_string(),
1074 ..Default::default()
1075 }],
1076 },
1077 &[],
1078 cx,
1079 )
1080 .unwrap();
1081 });
1082
1083 let main_ignored_buffer = project
1084 .update(cx, |project, cx| {
1085 project.open_buffer((main_worktree_id, "b.rs"), cx)
1086 })
1087 .await
1088 .unwrap();
1089 main_ignored_buffer.update(cx, |buffer, _| {
1090 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1091 assert_eq!(
1092 chunks
1093 .iter()
1094 .map(|(s, d)| (s.as_str(), *d))
1095 .collect::<Vec<_>>(),
1096 &[
1097 ("let ", None),
1098 ("b", Some(DiagnosticSeverity::ERROR)),
1099 (" = 2;", None),
1100 ],
1101 "Gigitnored buffers should still get in-buffer diagnostics",
1102 );
1103 });
1104 let other_buffer = project
1105 .update(cx, |project, cx| {
1106 project.open_buffer((other_worktree_id, ""), cx)
1107 })
1108 .await
1109 .unwrap();
1110 other_buffer.update(cx, |buffer, _| {
1111 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1112 assert_eq!(
1113 chunks
1114 .iter()
1115 .map(|(s, d)| (s.as_str(), *d))
1116 .collect::<Vec<_>>(),
1117 &[
1118 ("let b = ", None),
1119 ("c", Some(DiagnosticSeverity::ERROR)),
1120 (";", None),
1121 ],
1122 "Buffers from hidden projects should still get in-buffer diagnostics"
1123 );
1124 });
1125
1126 project.update(cx, |project, cx| {
1127 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1128 assert_eq!(
1129 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1130 vec![(
1131 ProjectPath {
1132 worktree_id: main_worktree_id,
1133 path: Arc::from(Path::new("b.rs")),
1134 },
1135 server_id,
1136 DiagnosticSummary {
1137 error_count: 1,
1138 warning_count: 0,
1139 }
1140 )]
1141 );
1142 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1143 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1144 });
1145}
1146
// End-to-end check of the disk-based diagnostics lifecycle: progress reported
// on the adapter's `disk_based_diagnostics_progress_token` should surface
// DiskBasedDiagnosticsStarted/Finished events, published diagnostics should
// land in the buffer, and re-publishing an identical empty diagnostic set
// must not emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Starting progress on the disk-based token announces a diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish one error for a.rs while the pass is in flight.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the same token finishes the diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer, as the primary entry
    // of its own diagnostic group.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event: the second identical empty publish was a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1275
// Restarting a language server while its disk-based diagnostics pass is still
// running must not leave the project stuck in a "diagnosing" state: the
// replacement server instance owns the progress lifecycle from then on, and
// the old server's never-finished pass is abandoned.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1354
// Restarting a language server should clear the diagnostics it had published,
// both from the buffer contents and from the project-wide summary counts.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic appears in the buffer and is counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1433
1434#[gpui::test]
1435async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1436 init_test(cx);
1437
1438 let fs = FakeFs::new(cx.executor());
1439 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1440
1441 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1442 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1443
1444 language_registry.add(rust_lang());
1445 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1446
1447 let buffer = project
1448 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1449 .await
1450 .unwrap();
1451
1452 // Before restarting the server, report diagnostics with an unknown buffer version.
1453 let fake_server = fake_servers.next().await.unwrap();
1454 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1455 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1456 version: Some(10000),
1457 diagnostics: Vec::new(),
1458 });
1459 cx.executor().run_until_parked();
1460
1461 project.update(cx, |project, cx| {
1462 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1463 });
1464 let mut fake_server = fake_servers.next().await.unwrap();
1465 let notification = fake_server
1466 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1467 .await
1468 .text_document;
1469 assert_eq!(notification.version, 0);
1470}
1471
// Cancelling language-server work for a buffer should send a
// WorkDoneProgressCancel notification only for progress tokens that were
// begun with `cancellable: true`.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable token…
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // …and one cancellable token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token receives a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1534
// Toggling `enable_language_server` in per-language settings should stop
// exactly the disabled language's server and start it back up when
// re-enabled, without disturbing servers for other languages.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    // Opening each buffer starts the matching language server, which is told
    // about the opened document.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the Rust buffer…
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // …while the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1648
// Exercises how published diagnostics are transformed as the buffer changes:
// diagnostics reported against an older document version must be mapped
// through later edits, overlapping diagnostics must highlight correctly, and
// diagnostics arriving out of positional order must resolve deterministically.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error wins the highlight.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1928
1929#[gpui::test]
1930async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1931 init_test(cx);
1932
1933 let text = concat!(
1934 "let one = ;\n", //
1935 "let two = \n",
1936 "let three = 3;\n",
1937 );
1938
1939 let fs = FakeFs::new(cx.executor());
1940 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1941
1942 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1943 let buffer = project
1944 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1945 .await
1946 .unwrap();
1947
1948 project.update(cx, |project, cx| {
1949 project.lsp_store.update(cx, |lsp_store, cx| {
1950 lsp_store
1951 .update_buffer_diagnostics(
1952 &buffer,
1953 LanguageServerId(0),
1954 None,
1955 vec![
1956 DiagnosticEntry {
1957 range: Unclipped(PointUtf16::new(0, 10))
1958 ..Unclipped(PointUtf16::new(0, 10)),
1959 diagnostic: Diagnostic {
1960 severity: DiagnosticSeverity::ERROR,
1961 message: "syntax error 1".to_string(),
1962 ..Default::default()
1963 },
1964 },
1965 DiagnosticEntry {
1966 range: Unclipped(PointUtf16::new(1, 10))
1967 ..Unclipped(PointUtf16::new(1, 10)),
1968 diagnostic: Diagnostic {
1969 severity: DiagnosticSeverity::ERROR,
1970 message: "syntax error 2".to_string(),
1971 ..Default::default()
1972 },
1973 },
1974 ],
1975 cx,
1976 )
1977 .unwrap();
1978 })
1979 });
1980
1981 // An empty range is extended forward to include the following character.
1982 // At the end of a line, an empty range is extended backward to include
1983 // the preceding character.
1984 buffer.update(cx, |buffer, _| {
1985 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1986 assert_eq!(
1987 chunks
1988 .iter()
1989 .map(|(s, d)| (s.as_str(), *d))
1990 .collect::<Vec<_>>(),
1991 &[
1992 ("let one = ", None),
1993 (";", Some(DiagnosticSeverity::ERROR)),
1994 ("\nlet two =", None),
1995 (" ", Some(DiagnosticSeverity::ERROR)),
1996 ("\nlet three = 3;\n", None)
1997 ]
1998 );
1999 });
2000}
2001
2002#[gpui::test]
2003async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2004 init_test(cx);
2005
2006 let fs = FakeFs::new(cx.executor());
2007 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2008 .await;
2009
2010 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2011
2012 project.update(cx, |project, cx| {
2013 project
2014 .update_diagnostic_entries(
2015 LanguageServerId(0),
2016 Path::new("/dir/a.rs").to_owned(),
2017 None,
2018 vec![DiagnosticEntry {
2019 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2020 diagnostic: Diagnostic {
2021 severity: DiagnosticSeverity::ERROR,
2022 is_primary: true,
2023 message: "syntax error a1".to_string(),
2024 ..Default::default()
2025 },
2026 }],
2027 cx,
2028 )
2029 .unwrap();
2030 project
2031 .update_diagnostic_entries(
2032 LanguageServerId(1),
2033 Path::new("/dir/a.rs").to_owned(),
2034 None,
2035 vec![DiagnosticEntry {
2036 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2037 diagnostic: Diagnostic {
2038 severity: DiagnosticSeverity::ERROR,
2039 is_primary: true,
2040 message: "syntax error b1".to_string(),
2041 ..Default::default()
2042 },
2043 }],
2044 cx,
2045 )
2046 .unwrap();
2047
2048 assert_eq!(
2049 project.diagnostic_summary(false, cx),
2050 DiagnosticSummary {
2051 error_count: 2,
2052 warning_count: 0,
2053 }
2054 );
2055 });
2056}
2057
// LSP text edits computed against an older document version must be
// transformed through the buffer edits made since that version before they
// are applied to the current buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the document version the server saw when the buffer was opened.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Ask for the server's edits (stated against `lsp_document_version`) to be
    // interpreted relative to the current, newer buffer contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // The transformed edits land in the right places despite the interleaved
    // buffer edits made above.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2210
// A small logical change delivered as a sprawling LSP diff (the way
// rust-analyzer reports merge-imports) should be normalized into a minimal
// set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The sprawling diff collapses into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2319
// Verifies that `edits_from_lsp` tolerates malformed server responses:
// unordered edits, inverted ranges (end before start), and ranges that point
// past the end of the buffer. They should all be normalized into the same
// minimal, ordered set of edits as a well-formed response would produce.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends far past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the anchor ranges to concrete points for assertion.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed input collapses to the same two minimal edits as the
        // well-formed variant of this test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2424
2425fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2426 buffer: &Buffer,
2427 range: Range<T>,
2428) -> Vec<(String, Option<DiagnosticSeverity>)> {
2429 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2430 for chunk in buffer.snapshot().chunks(range, true) {
2431 if chunks.last().map_or(false, |prev_chunk| {
2432 prev_chunk.1 == chunk.diagnostic_severity
2433 }) {
2434 chunks.last_mut().unwrap().0.push_str(chunk.text);
2435 } else {
2436 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2437 }
2438 }
2439 chunks
2440}
2441
// Verifies go-to-definition into a file outside the project: the target file
// is hosted in an invisible worktree, no additional language server is
// started for it, and the invisible worktree is released once the last
// reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as a (single-file) worktree; `a.rs` exists on
    // disk but is not part of the project yet.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the definition to a location inside `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an *invisible* worktree to host the target.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // With the definition dropped, the invisible worktree goes away.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2535
// Verifies that completions lacking an explicit LSP edit range are anchored to
// the word (or partial word) preceding the cursor: once for an identifier
// suffix (`fqn`) and once inside a string literal (`cmp`).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completion at the end of an identifier. The item has an
    // `insert_text` but no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers the partial word `fqn` before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completion inside a string literal, with the cursor before the
    // closing quote. No `insert_text`, so the label is used.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range covers `cmp`, the partial word before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2627
// Verifies that carriage returns in a completion's `insert_text` are
// normalized to plain newlines before the text reaches the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with insert text containing both a bare `\r` and a
    // `\r\n` sequence.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` were converted to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2688
// Verifies the command-based code-action flow: the action carries no edits, so
// after resolving it the editor must execute the action's command, and the
// buffer changes arrive via a server-initiated `workspace/applyEdit` request.
// The resulting transaction must be undoable as a single unit.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole transaction undoes as one step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2821
2822#[gpui::test(iterations = 10)]
2823async fn test_save_file(cx: &mut gpui::TestAppContext) {
2824 init_test(cx);
2825
2826 let fs = FakeFs::new(cx.executor());
2827 fs.insert_tree(
2828 "/dir",
2829 json!({
2830 "file1": "the old contents",
2831 }),
2832 )
2833 .await;
2834
2835 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2836 let buffer = project
2837 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2838 .await
2839 .unwrap();
2840 buffer.update(cx, |buffer, cx| {
2841 assert_eq!(buffer.text(), "the old contents");
2842 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2843 });
2844
2845 project
2846 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2847 .await
2848 .unwrap();
2849
2850 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2851 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2852}
2853
2854#[gpui::test(iterations = 30)]
2855async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2856 init_test(cx);
2857
2858 let fs = FakeFs::new(cx.executor().clone());
2859 fs.insert_tree(
2860 "/dir",
2861 json!({
2862 "file1": "the original contents",
2863 }),
2864 )
2865 .await;
2866
2867 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2868 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2869 let buffer = project
2870 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2871 .await
2872 .unwrap();
2873
2874 // Simulate buffer diffs being slow, so that they don't complete before
2875 // the next file change occurs.
2876 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2877
2878 // Change the buffer's file on disk, and then wait for the file change
2879 // to be detected by the worktree, so that the buffer starts reloading.
2880 fs.save(
2881 "/dir/file1".as_ref(),
2882 &"the first contents".into(),
2883 Default::default(),
2884 )
2885 .await
2886 .unwrap();
2887 worktree.next_event(cx).await;
2888
2889 // Change the buffer's file again. Depending on the random seed, the
2890 // previous file change may still be in progress.
2891 fs.save(
2892 "/dir/file1".as_ref(),
2893 &"the second contents".into(),
2894 Default::default(),
2895 )
2896 .await
2897 .unwrap();
2898 worktree.next_event(cx).await;
2899
2900 cx.executor().run_until_parked();
2901 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2902 buffer.read_with(cx, |buffer, _| {
2903 assert_eq!(buffer.text(), on_disk_text);
2904 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2905 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2906 });
2907}
2908
2909#[gpui::test(iterations = 30)]
2910async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2911 init_test(cx);
2912
2913 let fs = FakeFs::new(cx.executor().clone());
2914 fs.insert_tree(
2915 "/dir",
2916 json!({
2917 "file1": "the original contents",
2918 }),
2919 )
2920 .await;
2921
2922 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2923 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2924 let buffer = project
2925 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2926 .await
2927 .unwrap();
2928
2929 // Simulate buffer diffs being slow, so that they don't complete before
2930 // the next file change occurs.
2931 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2932
2933 // Change the buffer's file on disk, and then wait for the file change
2934 // to be detected by the worktree, so that the buffer starts reloading.
2935 fs.save(
2936 "/dir/file1".as_ref(),
2937 &"the first contents".into(),
2938 Default::default(),
2939 )
2940 .await
2941 .unwrap();
2942 worktree.next_event(cx).await;
2943
2944 cx.executor()
2945 .spawn(cx.executor().simulate_random_delay())
2946 .await;
2947
2948 // Perform a noop edit, causing the buffer's version to increase.
2949 buffer.update(cx, |buffer, cx| {
2950 buffer.edit([(0..0, " ")], None, cx);
2951 buffer.undo(cx);
2952 });
2953
2954 cx.executor().run_until_parked();
2955 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2956 buffer.read_with(cx, |buffer, _| {
2957 let buffer_text = buffer.text();
2958 if buffer_text == on_disk_text {
2959 assert!(
2960 !buffer.is_dirty() && !buffer.has_conflict(),
2961 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2962 );
2963 }
2964 // If the file change occurred while the buffer was processing the first
2965 // change, the buffer will be in a conflicting state.
2966 else {
2967 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2968 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2969 }
2970 });
2971}
2972
2973#[gpui::test]
2974async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2975 init_test(cx);
2976
2977 let fs = FakeFs::new(cx.executor());
2978 fs.insert_tree(
2979 "/dir",
2980 json!({
2981 "file1": "the old contents",
2982 }),
2983 )
2984 .await;
2985
2986 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2987 let buffer = project
2988 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2989 .await
2990 .unwrap();
2991 buffer.update(cx, |buffer, cx| {
2992 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2993 });
2994
2995 project
2996 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2997 .await
2998 .unwrap();
2999
3000 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3001 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3002}
3003
// Verifies save-as for an untitled buffer: the file is created on disk, the
// buffer transitions to clean, its language is re-detected from the new file
// extension, and reopening the path yields the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension caused the language to switch to Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must dedupe to the existing buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3055
// Exercises a real-FS worktree rescan after renames and deletions, checking
// that entry ids are stable across renames, open buffers track their files'
// new paths (or deletion), and that a remote replica of the worktree reaches
// the same state after applying the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits, so we can replay them
    // against the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames (including renames of ancestor directories).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // A deleted file keeps its last known path but is flagged deleted.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3203
// Verifies that renaming a directory through the project keeps both the
// directory's and its children's entry ids stable, and leaves a buffer opened
// under the old path clean (i.e. the rename is not mistaken for an edit).
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for a path, panicking if it's absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory `a` -> `b` via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both entries keep their ids, and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3255
3256#[gpui::test]
3257async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3258 init_test(cx);
3259
3260 let fs = FakeFs::new(cx.executor());
3261 fs.insert_tree(
3262 "/dir",
3263 json!({
3264 "a.txt": "a-contents",
3265 "b.txt": "b-contents",
3266 }),
3267 )
3268 .await;
3269
3270 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3271
3272 // Spawn multiple tasks to open paths, repeating some paths.
3273 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3274 (
3275 p.open_local_buffer("/dir/a.txt", cx),
3276 p.open_local_buffer("/dir/b.txt", cx),
3277 p.open_local_buffer("/dir/a.txt", cx),
3278 )
3279 });
3280
3281 let buffer_a_1 = buffer_a_1.await.unwrap();
3282 let buffer_a_2 = buffer_a_2.await.unwrap();
3283 let buffer_b = buffer_b.await.unwrap();
3284 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3285 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3286
3287 // There is only one buffer per path.
3288 let buffer_a_id = buffer_a_1.entity_id();
3289 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3290
3291 // Open the same path again while it is still open.
3292 drop(buffer_a_1);
3293 let buffer_a_3 = project
3294 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3295 .await
3296 .unwrap();
3297
3298 // There's still only one buffer per path.
3299 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3300}
3301
#[gpui::test]
// Verifies the buffer dirty-state lifecycle: edits mark a buffer dirty,
// saving clears the dirty flag, reverting to saved content clears it,
// and deleting the backing file marks a clean buffer dirty. Also checks
// the exact sequence of BufferEvents emitted at each transition.
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events fire for every edit; ignore them so the
                // assertions below only see the interesting state changes.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save: reusing the file's existing mtime is enough for
        // the buffer to consider itself in sync with disk.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first of the two consecutive edits flips the dirty bit,
        // so DirtyChanged appears once between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then clear the event log so only the
    // deletion's events are observed below.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3448
#[gpui::test]
// Verifies how a buffer reacts to its file changing on disk:
// - a clean buffer silently reloads, diffing against the new contents so
//   that anchors keep pointing at the corresponding text;
// - a dirty buffer does NOT reload and instead reports a conflict.
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // One anchor at column 1 of each of the three non-empty lines.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors for surviving lines ("aaa", "bbbbb") track the lines'
        // new positions; the anchor on the removed line ("c") lands at the
        // nearest surviving location.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3529
#[gpui::test]
// Verifies line-ending handling: buffers normalize text to "\n" internally
// while remembering the file's on-disk line ending, track line-ending
// changes made on disk, and write the remembered ending back out on save.
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file is normalized to "\n" in memory, but its Windows line
    // ending is remembered for saving.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3591
#[gpui::test]
// Verifies diagnostic grouping: LSP diagnostics whose relatedInformation
// entries reference one another are merged into groups, with one primary
// entry per group and supporting hints as non-primary members. Also checks
// that `diagnostic_group` returns the members of a single group.
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // A publishDiagnostics payload with two logical groups:
    // - "error 1" (warning) plus one hint, linked via relatedInformation;
    // - "error 2" (error) plus two hints at a different range.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position. "error 2" and its hints share
    // group 0; "error 1" and its hint share group 1. The primary of each
    // group is the original (non-hint) diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" with both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" with its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3833
#[gpui::test]
// Verifies LSP rename against a fake language server:
// `prepare_rename` resolves the symbol's range, and `perform_rename`
// applies a multi-file WorkspaceEdit, returning a transaction covering
// every buffer that was modified.
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename with prepare support, so the
    // client is expected to issue prepareRename before the rename itself.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE"; the server responds with edits to
    // both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo information;
    // both files should have been rewritten.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3967
#[gpui::test]
// Verifies project-wide text search: matches are found in on-disk files,
// and unsaved in-memory buffer edits are searched too (dirty buffers take
// precedence over their on-disk contents).
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Case-sensitive whole-word search for "TWO" over the on-disk tree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references
    // two::TWO twice; the next search must see the unsaved contents.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
4042
#[gpui::test]
// Verifies the `files_to_include` PathMatcher of SearchQuery::text:
// only files matching at least one inclusion glob are searched, and
// non-matching inclusion globs are simply inert.
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.ts".to_string(), vec![14..18]),
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4159
#[gpui::test]
// Verifies the `files_to_exclude` PathMatcher of SearchQuery::text:
// files matching any exclusion glob are skipped, non-matching exclusion
// globs are inert, and excluding every file yields an empty result.
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),

                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,

            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
4273
4274#[gpui::test]
4275async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4276 init_test(cx);
4277
4278 let search_query = "file";
4279
4280 let fs = FakeFs::new(cx.executor());
4281 fs.insert_tree(
4282 "/dir",
4283 json!({
4284 "one.rs": r#"// Rust file one"#,
4285 "one.ts": r#"// TypeScript file one"#,
4286 "two.rs": r#"// Rust file two"#,
4287 "two.ts": r#"// TypeScript file two"#,
4288 }),
4289 )
4290 .await;
4291 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4292
4293 assert!(
4294 search(
4295 &project,
4296 SearchQuery::text(
4297 search_query,
4298 false,
4299 true,
4300 false,
4301 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4302 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4303 None,
4304 )
4305 .unwrap(),
4306 cx
4307 )
4308 .await
4309 .unwrap()
4310 .is_empty(),
4311 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4312 );
4313
4314 assert!(
4315 search(
4316 &project,
4317 SearchQuery::text(
4318 search_query,
4319 false,
4320 true,
4321 false,
4322 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4323 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4324 None,
4325 ).unwrap(),
4326 cx
4327 )
4328 .await
4329 .unwrap()
4330 .is_empty(),
4331 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4332 );
4333
4334 assert!(
4335 search(
4336 &project,
4337 SearchQuery::text(
4338 search_query,
4339 false,
4340 true,
4341 false,
4342 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4343 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4344 None,
4345 )
4346 .unwrap(),
4347 cx
4348 )
4349 .await
4350 .unwrap()
4351 .is_empty(),
4352 "Non-matching inclusions and exclusions should not change that."
4353 );
4354
4355 assert_eq!(
4356 search(
4357 &project,
4358 SearchQuery::text(
4359 search_query,
4360 false,
4361 true,
4362 false,
4363 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4364 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4365 None,
4366 )
4367 .unwrap(),
4368 cx
4369 )
4370 .await
4371 .unwrap(),
4372 HashMap::from_iter([
4373 ("dir/one.ts".to_string(), vec![14..18]),
4374 ("dir/two.ts".to_string(), vec![14..18]),
4375 ]),
4376 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4377 );
4378}
4379
#[gpui::test]
// Verifies that inclusion globs in a multi-worktree project can scope a
// search to a single worktree (by prefixing the worktree name) or span
// all worktrees (by matching on extension only).
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An extension-only glob is not anchored to a worktree, so it matches
    // the .ts file in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4474
#[gpui::test]
// Verifies the include-ignored flag of SearchQuery::text: gitignored
// directories are skipped by default, fully searched when the flag is
// set, and inclusion/exclusion globs still apply to ignored files.
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Default search (include_ignored = false): ignored dirs are skipped.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each case so earlier searches don't
    // affect worktree state.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4591
#[gpui::test]
// Verifies the order in which search results are streamed: the exact
// sequence asserted below is non-ignored files before the gitignored
// node_modules entries, with aaa.txt last.
async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "aaa.txt": "key:value",
            "bbb": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "10 eleven": "key",
                "1 two": "key"
            },
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // include_ignored = true, so node_modules entries are searched too.
    let mut search = project.update(cx, |project, cx| {
        project.search(
            SearchQuery::text(
                "key",
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx,
        )
    });

    // Extracts the worktree-relative path of a buffer search result;
    // panics on any non-buffer result.
    fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
        match search_result.unwrap() {
            SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
                buffer.file().unwrap().path().to_string_lossy().to_string()
            }),
            _ => panic!("Expected buffer"),
        }
    }

    assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
    assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
    assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
    assert_eq!(file_name(search.next().await, cx), "aaa.txt");
    assert!(search.next().await.is_none())
}
4646
4647#[gpui::test]
4648async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4649 init_test(cx);
4650
4651 let fs = FakeFs::new(cx.executor().clone());
4652 fs.insert_tree(
4653 "/one/two",
4654 json!({
4655 "three": {
4656 "a.txt": "",
4657 "four": {}
4658 },
4659 "c.rs": ""
4660 }),
4661 )
4662 .await;
4663
4664 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4665 project
4666 .update(cx, |project, cx| {
4667 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4668 project.create_entry((id, "b.."), true, cx)
4669 })
4670 .await
4671 .unwrap()
4672 .to_included()
4673 .unwrap();
4674
4675 // Can't create paths outside the project
4676 let result = project
4677 .update(cx, |project, cx| {
4678 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4679 project.create_entry((id, "../../boop"), true, cx)
4680 })
4681 .await;
4682 assert!(result.is_err());
4683
4684 // Can't create paths with '..'
4685 let result = project
4686 .update(cx, |project, cx| {
4687 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4688 project.create_entry((id, "four/../beep"), true, cx)
4689 })
4690 .await;
4691 assert!(result.is_err());
4692
4693 assert_eq!(
4694 fs.paths(true),
4695 vec![
4696 PathBuf::from("/"),
4697 PathBuf::from("/one"),
4698 PathBuf::from("/one/two"),
4699 PathBuf::from("/one/two/c.rs"),
4700 PathBuf::from("/one/two/three"),
4701 PathBuf::from("/one/two/three/a.txt"),
4702 PathBuf::from("/one/two/three/b.."),
4703 PathBuf::from("/one/two/three/four"),
4704 ]
4705 );
4706
4707 // And we cannot open buffers with '..'
4708 let result = project
4709 .update(cx, |project, cx| {
4710 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4711 project.open_buffer((id, "../c.rs"), cx)
4712 })
4713 .await;
4714 assert!(result.is_err())
4715}
4716
// Verifies that a hover request fans out to every language server that
// advertises hover capabilities, that a server without the capability is
// never queried, and that all non-empty responses are surfaced.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: the first three declare hover
    // support, the last deliberately declares none.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening a buffer of the matching language starts all registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install per-server hover handlers: two answer with real hovers, ESLint
    // answers None, and the capability-less server panics if ever queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Block until every capable server has actually received the request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that answered Some(..) contribute hover contents.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4868
4869#[gpui::test]
4870async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4871 init_test(cx);
4872
4873 let fs = FakeFs::new(cx.executor());
4874 fs.insert_tree(
4875 "/dir",
4876 json!({
4877 "a.ts": "a",
4878 }),
4879 )
4880 .await;
4881
4882 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4883
4884 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4885 language_registry.add(typescript_lang());
4886 let mut fake_language_servers = language_registry.register_fake_lsp(
4887 "TypeScript",
4888 FakeLspAdapter {
4889 capabilities: lsp::ServerCapabilities {
4890 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4891 ..lsp::ServerCapabilities::default()
4892 },
4893 ..FakeLspAdapter::default()
4894 },
4895 );
4896
4897 let buffer = project
4898 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4899 .await
4900 .unwrap();
4901 cx.executor().run_until_parked();
4902
4903 let fake_server = fake_language_servers
4904 .next()
4905 .await
4906 .expect("failed to get the language server");
4907
4908 let mut request_handled =
4909 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4910 Ok(Some(lsp::Hover {
4911 contents: lsp::HoverContents::Array(vec![
4912 lsp::MarkedString::String("".to_string()),
4913 lsp::MarkedString::String(" ".to_string()),
4914 lsp::MarkedString::String("\n\n\n".to_string()),
4915 ]),
4916 range: None,
4917 }))
4918 });
4919
4920 let hover_task = project.update(cx, |project, cx| {
4921 project.hover(&buffer, Point::new(0, 0), cx)
4922 });
4923 let () = request_handled
4924 .next()
4925 .await
4926 .expect("All hover requests should have been triggered");
4927 assert_eq!(
4928 Vec::<String>::new(),
4929 hover_task
4930 .await
4931 .into_iter()
4932 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4933 .sorted()
4934 .collect::<Vec<_>>(),
4935 "Empty hover parts should be ignored"
4936 );
4937}
4938
4939#[gpui::test]
4940async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4941 init_test(cx);
4942
4943 let fs = FakeFs::new(cx.executor());
4944 fs.insert_tree(
4945 "/dir",
4946 json!({
4947 "a.tsx": "a",
4948 }),
4949 )
4950 .await;
4951
4952 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4953
4954 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4955 language_registry.add(tsx_lang());
4956 let language_server_names = [
4957 "TypeScriptServer",
4958 "TailwindServer",
4959 "ESLintServer",
4960 "NoActionsCapabilitiesServer",
4961 ];
4962
4963 let mut language_server_rxs = [
4964 language_registry.register_fake_lsp(
4965 "tsx",
4966 FakeLspAdapter {
4967 name: language_server_names[0],
4968 capabilities: lsp::ServerCapabilities {
4969 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4970 ..lsp::ServerCapabilities::default()
4971 },
4972 ..FakeLspAdapter::default()
4973 },
4974 ),
4975 language_registry.register_fake_lsp(
4976 "tsx",
4977 FakeLspAdapter {
4978 name: language_server_names[1],
4979 capabilities: lsp::ServerCapabilities {
4980 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4981 ..lsp::ServerCapabilities::default()
4982 },
4983 ..FakeLspAdapter::default()
4984 },
4985 ),
4986 language_registry.register_fake_lsp(
4987 "tsx",
4988 FakeLspAdapter {
4989 name: language_server_names[2],
4990 capabilities: lsp::ServerCapabilities {
4991 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4992 ..lsp::ServerCapabilities::default()
4993 },
4994 ..FakeLspAdapter::default()
4995 },
4996 ),
4997 language_registry.register_fake_lsp(
4998 "tsx",
4999 FakeLspAdapter {
5000 name: language_server_names[3],
5001 capabilities: lsp::ServerCapabilities {
5002 code_action_provider: None,
5003 ..lsp::ServerCapabilities::default()
5004 },
5005 ..FakeLspAdapter::default()
5006 },
5007 ),
5008 ];
5009
5010 let buffer = project
5011 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5012 .await
5013 .unwrap();
5014 cx.executor().run_until_parked();
5015
5016 let mut servers_with_actions_requests = HashMap::default();
5017 for i in 0..language_server_names.len() {
5018 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5019 panic!(
5020 "Failed to get language server #{i} with name {}",
5021 &language_server_names[i]
5022 )
5023 });
5024 let new_server_name = new_server.server.name();
5025
5026 assert!(
5027 !servers_with_actions_requests.contains_key(new_server_name),
5028 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5029 );
5030 let new_server_name = new_server_name.to_string();
5031 match new_server_name.as_str() {
5032 "TailwindServer" | "TypeScriptServer" => {
5033 servers_with_actions_requests.insert(
5034 new_server_name.clone(),
5035 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5036 move |_, _| {
5037 let name = new_server_name.clone();
5038 async move {
5039 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5040 lsp::CodeAction {
5041 title: format!("{name} code action"),
5042 ..lsp::CodeAction::default()
5043 },
5044 )]))
5045 }
5046 },
5047 ),
5048 );
5049 }
5050 "ESLintServer" => {
5051 servers_with_actions_requests.insert(
5052 new_server_name,
5053 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5054 |_, _| async move { Ok(None) },
5055 ),
5056 );
5057 }
5058 "NoActionsCapabilitiesServer" => {
5059 let _never_handled = new_server
5060 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5061 panic!(
5062 "Should not call for code actions server with no corresponding capabilities"
5063 )
5064 });
5065 }
5066 unexpected => panic!("Unexpected server name: {unexpected}"),
5067 }
5068 }
5069
5070 let code_actions_task = project.update(cx, |project, cx| {
5071 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5072 });
5073
5074 // cx.run_until_parked();
5075 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5076 |mut code_actions_request| async move {
5077 code_actions_request
5078 .next()
5079 .await
5080 .expect("All code actions requests should have been triggered")
5081 },
5082 ))
5083 .await;
5084 assert_eq!(
5085 vec!["TailwindServer code action", "TypeScriptServer code action"],
5086 code_actions_task
5087 .await
5088 .unwrap()
5089 .into_iter()
5090 .map(|code_action| code_action.lsp_action.title)
5091 .sorted()
5092 .collect::<Vec<_>>(),
5093 "Should receive code actions responses from all related servers with hover capabilities"
5094 );
5095}
5096
5097#[gpui::test]
5098async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5099 init_test(cx);
5100
5101 let fs = FakeFs::new(cx.executor());
5102 fs.insert_tree(
5103 "/dir",
5104 json!({
5105 "a.rs": "let a = 1;",
5106 "b.rs": "let b = 2;",
5107 "c.rs": "let c = 2;",
5108 }),
5109 )
5110 .await;
5111
5112 let project = Project::test(
5113 fs,
5114 [
5115 "/dir/a.rs".as_ref(),
5116 "/dir/b.rs".as_ref(),
5117 "/dir/c.rs".as_ref(),
5118 ],
5119 cx,
5120 )
5121 .await;
5122
5123 // check the initial state and get the worktrees
5124 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5125 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5126 assert_eq!(worktrees.len(), 3);
5127
5128 let worktree_a = worktrees[0].read(cx);
5129 let worktree_b = worktrees[1].read(cx);
5130 let worktree_c = worktrees[2].read(cx);
5131
5132 // check they start in the right order
5133 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5134 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5135 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5136
5137 (
5138 worktrees[0].clone(),
5139 worktrees[1].clone(),
5140 worktrees[2].clone(),
5141 )
5142 });
5143
5144 // move first worktree to after the second
5145 // [a, b, c] -> [b, a, c]
5146 project
5147 .update(cx, |project, cx| {
5148 let first = worktree_a.read(cx);
5149 let second = worktree_b.read(cx);
5150 project.move_worktree(first.id(), second.id(), cx)
5151 })
5152 .expect("moving first after second");
5153
5154 // check the state after moving
5155 project.update(cx, |project, cx| {
5156 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5157 assert_eq!(worktrees.len(), 3);
5158
5159 let first = worktrees[0].read(cx);
5160 let second = worktrees[1].read(cx);
5161 let third = worktrees[2].read(cx);
5162
5163 // check they are now in the right order
5164 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5165 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5166 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5167 });
5168
5169 // move the second worktree to before the first
5170 // [b, a, c] -> [a, b, c]
5171 project
5172 .update(cx, |project, cx| {
5173 let second = worktree_a.read(cx);
5174 let first = worktree_b.read(cx);
5175 project.move_worktree(first.id(), second.id(), cx)
5176 })
5177 .expect("moving second before first");
5178
5179 // check the state after moving
5180 project.update(cx, |project, cx| {
5181 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5182 assert_eq!(worktrees.len(), 3);
5183
5184 let first = worktrees[0].read(cx);
5185 let second = worktrees[1].read(cx);
5186 let third = worktrees[2].read(cx);
5187
5188 // check they are now in the right order
5189 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5190 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5191 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5192 });
5193
5194 // move the second worktree to after the third
5195 // [a, b, c] -> [a, c, b]
5196 project
5197 .update(cx, |project, cx| {
5198 let second = worktree_b.read(cx);
5199 let third = worktree_c.read(cx);
5200 project.move_worktree(second.id(), third.id(), cx)
5201 })
5202 .expect("moving second after third");
5203
5204 // check the state after moving
5205 project.update(cx, |project, cx| {
5206 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5207 assert_eq!(worktrees.len(), 3);
5208
5209 let first = worktrees[0].read(cx);
5210 let second = worktrees[1].read(cx);
5211 let third = worktrees[2].read(cx);
5212
5213 // check they are now in the right order
5214 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5215 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5216 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5217 });
5218
5219 // move the third worktree to before the second
5220 // [a, c, b] -> [a, b, c]
5221 project
5222 .update(cx, |project, cx| {
5223 let third = worktree_c.read(cx);
5224 let second = worktree_b.read(cx);
5225 project.move_worktree(third.id(), second.id(), cx)
5226 })
5227 .expect("moving third before second");
5228
5229 // check the state after moving
5230 project.update(cx, |project, cx| {
5231 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5232 assert_eq!(worktrees.len(), 3);
5233
5234 let first = worktrees[0].read(cx);
5235 let second = worktrees[1].read(cx);
5236 let third = worktrees[2].read(cx);
5237
5238 // check they are now in the right order
5239 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5240 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5241 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5242 });
5243
5244 // move the first worktree to after the third
5245 // [a, b, c] -> [b, c, a]
5246 project
5247 .update(cx, |project, cx| {
5248 let first = worktree_a.read(cx);
5249 let third = worktree_c.read(cx);
5250 project.move_worktree(first.id(), third.id(), cx)
5251 })
5252 .expect("moving first after third");
5253
5254 // check the state after moving
5255 project.update(cx, |project, cx| {
5256 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5257 assert_eq!(worktrees.len(), 3);
5258
5259 let first = worktrees[0].read(cx);
5260 let second = worktrees[1].read(cx);
5261 let third = worktrees[2].read(cx);
5262
5263 // check they are now in the right order
5264 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5265 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5266 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5267 });
5268
5269 // move the third worktree to before the first
5270 // [b, c, a] -> [a, b, c]
5271 project
5272 .update(cx, |project, cx| {
5273 let third = worktree_a.read(cx);
5274 let first = worktree_b.read(cx);
5275 project.move_worktree(third.id(), first.id(), cx)
5276 })
5277 .expect("moving third before first");
5278
5279 // check the state after moving
5280 project.update(cx, |project, cx| {
5281 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5282 assert_eq!(worktrees.len(), 3);
5283
5284 let first = worktrees[0].read(cx);
5285 let second = worktrees[1].read(cx);
5286 let third = worktrees[2].read(cx);
5287
5288 // check they are now in the right order
5289 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5290 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5291 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5292 });
5293}
5294
5295async fn search(
5296 project: &Model<Project>,
5297 query: SearchQuery,
5298 cx: &mut gpui::TestAppContext,
5299) -> Result<HashMap<String, Vec<Range<usize>>>> {
5300 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5301 let mut results = HashMap::default();
5302 while let Some(search_result) = search_rx.next().await {
5303 match search_result {
5304 SearchResult::Buffer { buffer, ranges } => {
5305 results.entry(buffer).or_insert(ranges);
5306 }
5307 SearchResult::LimitReached => {}
5308 }
5309 }
5310 Ok(results
5311 .into_iter()
5312 .map(|(buffer, ranges)| {
5313 buffer.update(cx, |buffer, cx| {
5314 let path = buffer
5315 .file()
5316 .unwrap()
5317 .full_path(cx)
5318 .to_string_lossy()
5319 .to_string();
5320 let ranges = ranges
5321 .into_iter()
5322 .map(|range| range.to_offset(buffer))
5323 .collect::<Vec<_>>();
5324 (path, ranges)
5325 })
5326 })
5327 .collect())
5328}
5329
/// Shared per-test setup: installs test settings, release-channel metadata,
/// language support, and project settings into the app context.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    // Opt into logging when RUST_LOG is set; `try_init` tolerates being
    // called again by other tests in the same process.
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
5343
5344fn json_lang() -> Arc<Language> {
5345 Arc::new(Language::new(
5346 LanguageConfig {
5347 name: "JSON".into(),
5348 matcher: LanguageMatcher {
5349 path_suffixes: vec!["json".to_string()],
5350 ..Default::default()
5351 },
5352 ..Default::default()
5353 },
5354 None,
5355 ))
5356}
5357
5358fn js_lang() -> Arc<Language> {
5359 Arc::new(Language::new(
5360 LanguageConfig {
5361 name: "JavaScript".into(),
5362 matcher: LanguageMatcher {
5363 path_suffixes: vec!["js".to_string()],
5364 ..Default::default()
5365 },
5366 ..Default::default()
5367 },
5368 None,
5369 ))
5370}
5371
5372fn rust_lang() -> Arc<Language> {
5373 Arc::new(Language::new(
5374 LanguageConfig {
5375 name: "Rust".into(),
5376 matcher: LanguageMatcher {
5377 path_suffixes: vec!["rs".to_string()],
5378 ..Default::default()
5379 },
5380 ..Default::default()
5381 },
5382 Some(tree_sitter_rust::LANGUAGE.into()),
5383 ))
5384}
5385
5386fn typescript_lang() -> Arc<Language> {
5387 Arc::new(Language::new(
5388 LanguageConfig {
5389 name: "TypeScript".into(),
5390 matcher: LanguageMatcher {
5391 path_suffixes: vec!["ts".to_string()],
5392 ..Default::default()
5393 },
5394 ..Default::default()
5395 },
5396 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5397 ))
5398}
5399
5400fn tsx_lang() -> Arc<Language> {
5401 Arc::new(Language::new(
5402 LanguageConfig {
5403 name: "tsx".into(),
5404 matcher: LanguageMatcher {
5405 path_suffixes: vec!["tsx".to_string()],
5406 ..Default::default()
5407 },
5408 ..Default::default()
5409 },
5410 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5411 ))
5412}
5413
5414fn get_all_tasks(
5415 project: &Model<Project>,
5416 worktree_id: Option<WorktreeId>,
5417 task_context: &TaskContext,
5418 cx: &mut AppContext,
5419) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5420 let resolved_tasks = project.update(cx, |project, cx| {
5421 project
5422 .task_inventory()
5423 .read(cx)
5424 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5425 });
5426
5427 cx.spawn(|_| async move {
5428 let (mut old, new) = resolved_tasks.await;
5429 old.extend(new);
5430 old
5431 })
5432}