1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::NumberOrString;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/tmp").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
51#[cfg(not(windows))]
52#[gpui::test]
53async fn test_symlinks(cx: &mut gpui::TestAppContext) {
54 init_test(cx);
55 cx.executor().allow_parking();
56
57 let dir = temp_tree(json!({
58 "root": {
59 "apple": "",
60 "banana": {
61 "carrot": {
62 "date": "",
63 "endive": "",
64 }
65 },
66 "fennel": {
67 "grape": "",
68 }
69 }
70 }));
71
72 let root_link_path = dir.path().join("root_link");
73 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
74 os::unix::fs::symlink(
75 &dir.path().join("root/fennel"),
76 &dir.path().join("root/finnochio"),
77 )
78 .unwrap();
79
80 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
81
82 project.update(cx, |project, cx| {
83 let tree = project.worktrees().next().unwrap().read(cx);
84 assert_eq!(tree.file_count(), 5);
85 assert_eq!(
86 tree.inode_for_path("fennel/grape"),
87 tree.inode_for_path("finnochio/grape")
88 );
89 });
90}
91
// Verifies that per-directory `.zed` configuration is honored: settings
// (`tab_size`) resolve per-subtree, task templates are gathered from every
// `.zed/tasks.json` in the worktree, and a static task source can be replaced
// by a channel-backed one whose updates are picked up.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a root-level `.zed` (settings + tasks) and a nested `b/.zed`
    // that overrides both for files under `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let task_context = TaskContext::default();

    // Let the worktree scan finish and the `.zed` files get loaded.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        })
    });
    // Source kind identifying the root-level tasks.json.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // `a/a.rs` resolves the root settings (tab_size 8), while
            // `b/b.rs` resolves the nested override (tab_size 2).
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both the root and nested `cargo check` tasks are resolved, each
    // attributed to its own tasks.json.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as the most recently scheduled one.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Swap the root-level static source for a channel-backed one and push an
    // updated task definition (extra arg + env var) through the channel.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The updated root task (new args + env) is returned alongside the
    // unchanged nested task.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
296
// Exercises the full lifecycle of language servers attached to buffers:
// startup on first relevant buffer, capability-based buffer configuration,
// routing of change/save/rename/close notifications to the matching server,
// language reassignment when a file's extension changes, and server restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers, each advertising distinct
    // completion trigger characters so we can tell which server configured
    // a given buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers either.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The rust server's next change notification is for test2.rs — the TOML
    // edit was never sent to it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename surfaces to the server as a close of the old path followed by
    // an open of the new one, with a reset document version.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
667
// Verifies `workspace/didChangeWatchedFiles` support: a server's watch
// registration can force gitignored directories to be scanned, and subsequent
// filesystem mutations are reported to the server only when they match one of
// its registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target/` is gitignored; only `target/y/**` will later be watched.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by the watch registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob in `src/`, and a
    // recursive glob inside the ignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record every change notification, keeping the list sorted by URI so
    // assertions below are order-independent.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No events yet; the registration alone caused 4 directory reads.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
861
862#[gpui::test]
863async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
864 init_test(cx);
865
866 let fs = FakeFs::new(cx.executor());
867 fs.insert_tree(
868 "/dir",
869 json!({
870 "a.rs": "let a = 1;",
871 "b.rs": "let b = 2;"
872 }),
873 )
874 .await;
875
876 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
877
878 let buffer_a = project
879 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
880 .await
881 .unwrap();
882 let buffer_b = project
883 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
884 .await
885 .unwrap();
886
887 project.update(cx, |project, cx| {
888 project
889 .update_diagnostics(
890 LanguageServerId(0),
891 lsp::PublishDiagnosticsParams {
892 uri: Url::from_file_path("/dir/a.rs").unwrap(),
893 version: None,
894 diagnostics: vec![lsp::Diagnostic {
895 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
896 severity: Some(lsp::DiagnosticSeverity::ERROR),
897 message: "error 1".to_string(),
898 ..Default::default()
899 }],
900 },
901 &[],
902 cx,
903 )
904 .unwrap();
905 project
906 .update_diagnostics(
907 LanguageServerId(0),
908 lsp::PublishDiagnosticsParams {
909 uri: Url::from_file_path("/dir/b.rs").unwrap(),
910 version: None,
911 diagnostics: vec![lsp::Diagnostic {
912 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
913 severity: Some(lsp::DiagnosticSeverity::WARNING),
914 message: "error 2".to_string(),
915 ..Default::default()
916 }],
917 },
918 &[],
919 cx,
920 )
921 .unwrap();
922 });
923
924 buffer_a.update(cx, |buffer, _| {
925 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
926 assert_eq!(
927 chunks
928 .iter()
929 .map(|(s, d)| (s.as_str(), *d))
930 .collect::<Vec<_>>(),
931 &[
932 ("let ", None),
933 ("a", Some(DiagnosticSeverity::ERROR)),
934 (" = 1;", None),
935 ]
936 );
937 });
938 buffer_b.update(cx, |buffer, _| {
939 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
940 assert_eq!(
941 chunks
942 .iter()
943 .map(|(s, d)| (s.as_str(), *d))
944 .collect::<Vec<_>>(),
945 &[
946 ("let ", None),
947 ("b", Some(DiagnosticSeverity::WARNING)),
948 (" = 2;", None),
949 ]
950 );
951 });
952}
953
954#[gpui::test]
955async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
956 init_test(cx);
957
958 let fs = FakeFs::new(cx.executor());
959 fs.insert_tree(
960 "/root",
961 json!({
962 "dir": {
963 ".git": {
964 "HEAD": "ref: refs/heads/main",
965 },
966 ".gitignore": "b.rs",
967 "a.rs": "let a = 1;",
968 "b.rs": "let b = 2;",
969 },
970 "other.rs": "let b = c;"
971 }),
972 )
973 .await;
974
975 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
976 let (worktree, _) = project
977 .update(cx, |project, cx| {
978 project.find_or_create_local_worktree("/root/dir", true, cx)
979 })
980 .await
981 .unwrap();
982 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
983
984 let (worktree, _) = project
985 .update(cx, |project, cx| {
986 project.find_or_create_local_worktree("/root/other.rs", false, cx)
987 })
988 .await
989 .unwrap();
990 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
991
992 let server_id = LanguageServerId(0);
993 project.update(cx, |project, cx| {
994 project
995 .update_diagnostics(
996 server_id,
997 lsp::PublishDiagnosticsParams {
998 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
999 version: None,
1000 diagnostics: vec![lsp::Diagnostic {
1001 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1002 severity: Some(lsp::DiagnosticSeverity::ERROR),
1003 message: "unused variable 'b'".to_string(),
1004 ..Default::default()
1005 }],
1006 },
1007 &[],
1008 cx,
1009 )
1010 .unwrap();
1011 project
1012 .update_diagnostics(
1013 server_id,
1014 lsp::PublishDiagnosticsParams {
1015 uri: Url::from_file_path("/root/other.rs").unwrap(),
1016 version: None,
1017 diagnostics: vec![lsp::Diagnostic {
1018 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1019 severity: Some(lsp::DiagnosticSeverity::ERROR),
1020 message: "unknown variable 'c'".to_string(),
1021 ..Default::default()
1022 }],
1023 },
1024 &[],
1025 cx,
1026 )
1027 .unwrap();
1028 });
1029
1030 let main_ignored_buffer = project
1031 .update(cx, |project, cx| {
1032 project.open_buffer((main_worktree_id, "b.rs"), cx)
1033 })
1034 .await
1035 .unwrap();
1036 main_ignored_buffer.update(cx, |buffer, _| {
1037 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1038 assert_eq!(
1039 chunks
1040 .iter()
1041 .map(|(s, d)| (s.as_str(), *d))
1042 .collect::<Vec<_>>(),
1043 &[
1044 ("let ", None),
1045 ("b", Some(DiagnosticSeverity::ERROR)),
1046 (" = 2;", None),
1047 ],
1048 "Gigitnored buffers should still get in-buffer diagnostics",
1049 );
1050 });
1051 let other_buffer = project
1052 .update(cx, |project, cx| {
1053 project.open_buffer((other_worktree_id, ""), cx)
1054 })
1055 .await
1056 .unwrap();
1057 other_buffer.update(cx, |buffer, _| {
1058 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1059 assert_eq!(
1060 chunks
1061 .iter()
1062 .map(|(s, d)| (s.as_str(), *d))
1063 .collect::<Vec<_>>(),
1064 &[
1065 ("let b = ", None),
1066 ("c", Some(DiagnosticSeverity::ERROR)),
1067 (";", None),
1068 ],
1069 "Buffers from hidden projects should still get in-buffer diagnostics"
1070 );
1071 });
1072
1073 project.update(cx, |project, cx| {
1074 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1075 assert_eq!(
1076 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1077 vec![(
1078 ProjectPath {
1079 worktree_id: main_worktree_id,
1080 path: Arc::from(Path::new("b.rs")),
1081 },
1082 server_id,
1083 DiagnosticSummary {
1084 error_count: 1,
1085 warning_count: 0,
1086 }
1087 )]
1088 );
1089 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1090 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1091 });
1092}
1093
// Verifies that a language server's disk-based diagnostics progress is surfaced
// as project events in order: LanguageServerAdded, DiskBasedDiagnosticsStarted,
// DiagnosticsUpdated, then DiskBasedDiagnosticsFinished — and that republishing
// an identical (empty) diagnostic set does not emit a redundant update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter is configured so that progress reported under
    // `progress_token` counts as disk-based diagnostics work.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning a progress unit whose token is prefixed by the disk-based
    // token should emit DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a file (even one that is not open) emits a
    // DiagnosticsUpdated event for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical (empty) publish must not produce another event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1222
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in a "diagnostics
// running" state: the replacement server's start/finish events are emitted
// normally, and once it finishes, no server is reported as running disk-based
// diagnostics — even though the old server never ended its progress.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1301
// Verifies that diagnostics previously published by a language server are
// cleared — both from the buffer and from the project's diagnostic summary —
// when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification be processed, then confirm both the buffer's
    // diagnostics and the project-wide summary reflect the error.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1381
1382#[gpui::test]
1383async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1384 init_test(cx);
1385
1386 let fs = FakeFs::new(cx.executor());
1387 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1388
1389 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1390 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1391
1392 language_registry.add(rust_lang());
1393 let mut fake_servers =
1394 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1395
1396 let buffer = project
1397 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1398 .await
1399 .unwrap();
1400
1401 // Before restarting the server, report diagnostics with an unknown buffer version.
1402 let fake_server = fake_servers.next().await.unwrap();
1403 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1404 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1405 version: Some(10000),
1406 diagnostics: Vec::new(),
1407 });
1408 cx.executor().run_until_parked();
1409
1410 project.update(cx, |project, cx| {
1411 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1412 });
1413 let mut fake_server = fake_servers.next().await.unwrap();
1414 let notification = fake_server
1415 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1416 .await
1417 .text_document;
1418 assert_eq!(notification.version, 0);
1419}
1420
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress units begun with
// `cancellable: Some(true)`; the non-cancellable progress token receives
// no cancel notification.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable progress unit...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable unit under the disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1483
// Verifies that the per-language `enable_language_server` setting starts and
// stops exactly the affected servers: disabling Rust shuts down only the Rust
// server (the JavaScript server keeps running); re-enabling Rust while
// disabling JavaScript restarts the Rust server and stops the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an exit notification; the JS server does not.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1597
// Verifies that diagnostics published against an older buffer version are
// translated through the edits made since that version: ranges shift with
// insertions, overlapping diagnostics are highlighted correctly, and
// diagnostics tagged with an up-to-date version are mapped through unsaved
// edits. Also exercises out-of-order (non-sorted) diagnostic ranges.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two-line insertion at the top shifts every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The error takes precedence where the two ranges overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The diagnostics come back sorted, mapped through the latest edits.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1877
1878#[gpui::test]
1879async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1880 init_test(cx);
1881
1882 let text = concat!(
1883 "let one = ;\n", //
1884 "let two = \n",
1885 "let three = 3;\n",
1886 );
1887
1888 let fs = FakeFs::new(cx.executor());
1889 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1890
1891 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1892 let buffer = project
1893 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1894 .await
1895 .unwrap();
1896
1897 project.update(cx, |project, cx| {
1898 project
1899 .update_buffer_diagnostics(
1900 &buffer,
1901 LanguageServerId(0),
1902 None,
1903 vec![
1904 DiagnosticEntry {
1905 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1906 diagnostic: Diagnostic {
1907 severity: DiagnosticSeverity::ERROR,
1908 message: "syntax error 1".to_string(),
1909 ..Default::default()
1910 },
1911 },
1912 DiagnosticEntry {
1913 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1914 diagnostic: Diagnostic {
1915 severity: DiagnosticSeverity::ERROR,
1916 message: "syntax error 2".to_string(),
1917 ..Default::default()
1918 },
1919 },
1920 ],
1921 cx,
1922 )
1923 .unwrap();
1924 });
1925
1926 // An empty range is extended forward to include the following character.
1927 // At the end of a line, an empty range is extended backward to include
1928 // the preceding character.
1929 buffer.update(cx, |buffer, _| {
1930 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1931 assert_eq!(
1932 chunks
1933 .iter()
1934 .map(|(s, d)| (s.as_str(), *d))
1935 .collect::<Vec<_>>(),
1936 &[
1937 ("let one = ", None),
1938 (";", Some(DiagnosticSeverity::ERROR)),
1939 ("\nlet two =", None),
1940 (" ", Some(DiagnosticSeverity::ERROR)),
1941 ("\nlet three = 3;\n", None)
1942 ]
1943 );
1944 });
1945}
1946
1947#[gpui::test]
1948async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1949 init_test(cx);
1950
1951 let fs = FakeFs::new(cx.executor());
1952 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1953 .await;
1954
1955 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1956
1957 project.update(cx, |project, cx| {
1958 project
1959 .update_diagnostic_entries(
1960 LanguageServerId(0),
1961 Path::new("/dir/a.rs").to_owned(),
1962 None,
1963 vec![DiagnosticEntry {
1964 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1965 diagnostic: Diagnostic {
1966 severity: DiagnosticSeverity::ERROR,
1967 is_primary: true,
1968 message: "syntax error a1".to_string(),
1969 ..Default::default()
1970 },
1971 }],
1972 cx,
1973 )
1974 .unwrap();
1975 project
1976 .update_diagnostic_entries(
1977 LanguageServerId(1),
1978 Path::new("/dir/a.rs").to_owned(),
1979 None,
1980 vec![DiagnosticEntry {
1981 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1982 diagnostic: Diagnostic {
1983 severity: DiagnosticSeverity::ERROR,
1984 is_primary: true,
1985 message: "syntax error b1".to_string(),
1986 ..Default::default()
1987 },
1988 }],
1989 cx,
1990 )
1991 .unwrap();
1992
1993 assert_eq!(
1994 project.diagnostic_summary(false, cx),
1995 DiagnosticSummary {
1996 error_count: 2,
1997 warning_count: 0,
1998 }
1999 );
2000 });
2001}
2002
// Verifies that `Project::edits_from_lsp` adjusts edits that a language
// server computed against an older document version: the buffer is edited
// after the server sees it, and the server's edits (tagged with the old
// version) must land at the corresponding, shifted positions.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server saw at open time; the edits below are
    // declared against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edits below use coordinates in the OLD (pre-edit) document version.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the adjusted edits must respect the intervening local edits:
    // comments stay in place, and each function body gets its change.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2155
// Verifies that `Project::edits_from_lsp` minimizes a large, diff-style set
// of LSP edits (delete-most-of-file + reinsert, as rust-analyzer emits for
// merge-imports) down to the small net edits that actually change the text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four diff-style edits collapse into just two net edits:
        // rewrite the import path, and drop the now-duplicate second use.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2263
// Verifies that `edits_from_lsp` sanitizes malformed edit lists from a
// language server: edits arriving out of order, with inverted
// (end-before-start) ranges, or with positions past the end of the file must
// still be normalized into a minimal, correctly-ordered set of buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Initial on-disk contents of the file being edited.
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Insertion at the end of line 0, sent before the edit
                    // that precedes it in the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 8) comes before start (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    // Second insertion at the same position as the first one.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to concrete points so they can
        // be compared against expected coordinates.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four messy LSP edits collapse into two well-formed edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying the normalized edits yields the intended final text.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2367
2368fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2369 buffer: &Buffer,
2370 range: Range<T>,
2371) -> Vec<(String, Option<DiagnosticSeverity>)> {
2372 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2373 for chunk in buffer.snapshot().chunks(range, true) {
2374 if chunks.last().map_or(false, |prev_chunk| {
2375 prev_chunk.1 == chunk.diagnostic_severity
2376 }) {
2377 chunks.last_mut().unwrap().0.push_str(chunk.text);
2378 } else {
2379 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2380 }
2381 }
2382 chunks
2383}
2384
// Exercises go-to-definition when the target lives outside the project's
// visible worktree: resolving the definition should create an *invisible*
// worktree for the target file (without starting another language server),
// and dropping the definition should release that worktree again.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as the project root; `a.rs` exists on disk but
    // belongs to no worktree yet.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports that the symbol at offset 22 in `b.rs` is
    // defined at columns 9..10 of `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        // The target buffer points at `a.rs` with the range the server gave.
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // An invisible worktree for `a.rs` now exists alongside the visible
        // worktree for `b.rs`.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // With the definition (the only handle to the target buffer) dropped,
    // the invisible worktree is released.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: every worktree's absolute path paired with its visibility.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2479
// When the language server returns completion items with no explicit
// `textEdit` range, the project must infer which range of buffer text the
// completion replaces from the text surrounding the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of a partial word. The inferred replaced
    // range should cover the word fragment "fqn" (the last 3 characters).
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item carries an `insert_text` but no `textEdit` range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing just before the closing quote of a string literal.
    // The inferred range should cover the token fragment "cmp" only, not the
    // closing quote. Without `insert_text`, the label is used as `new_text`.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2571
// Completion text received from a language server may use `\r` or `\r\n`
// line endings; the project must normalize them to `\n` before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare `\r` and `\r\n` line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2632
// Applying a code action whose resolution yields a *command* (rather than
// edits) must execute that command, and any `workspace/applyEdit` request the
// server issues while the command runs must land in the returned project
// transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise lazy code-action resolution so the project issues a
    // `codeAction/resolve` request before applying the action.
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying the command payload).
    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // The edit was applied, and it is undoable as a single transaction.
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2765
2766#[gpui::test(iterations = 10)]
2767async fn test_save_file(cx: &mut gpui::TestAppContext) {
2768 init_test(cx);
2769
2770 let fs = FakeFs::new(cx.executor());
2771 fs.insert_tree(
2772 "/dir",
2773 json!({
2774 "file1": "the old contents",
2775 }),
2776 )
2777 .await;
2778
2779 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2780 let buffer = project
2781 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2782 .await
2783 .unwrap();
2784 buffer.update(cx, |buffer, cx| {
2785 assert_eq!(buffer.text(), "the old contents");
2786 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2787 });
2788
2789 project
2790 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2791 .await
2792 .unwrap();
2793
2794 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2795 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2796}
2797
2798#[gpui::test(iterations = 30)]
2799async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2800 init_test(cx);
2801
2802 let fs = FakeFs::new(cx.executor().clone());
2803 fs.insert_tree(
2804 "/dir",
2805 json!({
2806 "file1": "the original contents",
2807 }),
2808 )
2809 .await;
2810
2811 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2812 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2813 let buffer = project
2814 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2815 .await
2816 .unwrap();
2817
2818 // Simulate buffer diffs being slow, so that they don't complete before
2819 // the next file change occurs.
2820 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2821
2822 // Change the buffer's file on disk, and then wait for the file change
2823 // to be detected by the worktree, so that the buffer starts reloading.
2824 fs.save(
2825 "/dir/file1".as_ref(),
2826 &"the first contents".into(),
2827 Default::default(),
2828 )
2829 .await
2830 .unwrap();
2831 worktree.next_event(cx).await;
2832
2833 // Change the buffer's file again. Depending on the random seed, the
2834 // previous file change may still be in progress.
2835 fs.save(
2836 "/dir/file1".as_ref(),
2837 &"the second contents".into(),
2838 Default::default(),
2839 )
2840 .await
2841 .unwrap();
2842 worktree.next_event(cx).await;
2843
2844 cx.executor().run_until_parked();
2845 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2846 buffer.read_with(cx, |buffer, _| {
2847 assert_eq!(buffer.text(), on_disk_text);
2848 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2849 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2850 });
2851}
2852
2853#[gpui::test(iterations = 30)]
2854async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2855 init_test(cx);
2856
2857 let fs = FakeFs::new(cx.executor().clone());
2858 fs.insert_tree(
2859 "/dir",
2860 json!({
2861 "file1": "the original contents",
2862 }),
2863 )
2864 .await;
2865
2866 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2867 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2868 let buffer = project
2869 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2870 .await
2871 .unwrap();
2872
2873 // Simulate buffer diffs being slow, so that they don't complete before
2874 // the next file change occurs.
2875 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2876
2877 // Change the buffer's file on disk, and then wait for the file change
2878 // to be detected by the worktree, so that the buffer starts reloading.
2879 fs.save(
2880 "/dir/file1".as_ref(),
2881 &"the first contents".into(),
2882 Default::default(),
2883 )
2884 .await
2885 .unwrap();
2886 worktree.next_event(cx).await;
2887
2888 cx.executor()
2889 .spawn(cx.executor().simulate_random_delay())
2890 .await;
2891
2892 // Perform a noop edit, causing the buffer's version to increase.
2893 buffer.update(cx, |buffer, cx| {
2894 buffer.edit([(0..0, " ")], None, cx);
2895 buffer.undo(cx);
2896 });
2897
2898 cx.executor().run_until_parked();
2899 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2900 buffer.read_with(cx, |buffer, _| {
2901 let buffer_text = buffer.text();
2902 if buffer_text == on_disk_text {
2903 assert!(
2904 !buffer.is_dirty() && !buffer.has_conflict(),
2905 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2906 );
2907 }
2908 // If the file change occurred while the buffer was processing the first
2909 // change, the buffer will be in a conflicting state.
2910 else {
2911 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2912 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2913 }
2914 });
2915}
2916
2917#[gpui::test]
2918async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2919 init_test(cx);
2920
2921 let fs = FakeFs::new(cx.executor());
2922 fs.insert_tree(
2923 "/dir",
2924 json!({
2925 "file1": "the old contents",
2926 }),
2927 )
2928 .await;
2929
2930 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2931 let buffer = project
2932 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2933 .await
2934 .unwrap();
2935 buffer.update(cx, |buffer, cx| {
2936 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2937 });
2938
2939 project
2940 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2941 .await
2942 .unwrap();
2943
2944 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2945 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2946}
2947
2948#[gpui::test]
2949async fn test_save_as(cx: &mut gpui::TestAppContext) {
2950 init_test(cx);
2951
2952 let fs = FakeFs::new(cx.executor());
2953 fs.insert_tree("/dir", json!({})).await;
2954
2955 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2956
2957 let languages = project.update(cx, |project, _| project.languages().clone());
2958 languages.add(rust_lang());
2959
2960 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
2961 buffer.update(cx, |buffer, cx| {
2962 buffer.edit([(0..0, "abc")], None, cx);
2963 assert!(buffer.is_dirty());
2964 assert!(!buffer.has_conflict());
2965 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2966 });
2967 project
2968 .update(cx, |project, cx| {
2969 let worktree_id = project.worktrees().next().unwrap().read(cx).id();
2970 let path = ProjectPath {
2971 worktree_id,
2972 path: Arc::from(Path::new("file1.rs")),
2973 };
2974 project.save_buffer_as(buffer.clone(), path, cx)
2975 })
2976 .await
2977 .unwrap();
2978 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2979
2980 cx.executor().run_until_parked();
2981 buffer.update(cx, |buffer, cx| {
2982 assert_eq!(
2983 buffer.file().unwrap().full_path(cx),
2984 Path::new("dir/file1.rs")
2985 );
2986 assert!(!buffer.is_dirty());
2987 assert!(!buffer.has_conflict());
2988 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2989 });
2990
2991 let opened_buffer = project
2992 .update(cx, |project, cx| {
2993 project.open_local_buffer("/dir/file1.rs", cx)
2994 })
2995 .await
2996 .unwrap();
2997 assert_eq!(opened_buffer, buffer);
2998}
2999
// Uses the real filesystem to verify two things at once: (1) renames and
// deletions on disk preserve worktree entry ids and update open buffers'
// file metadata, and (2) a remote replica of the worktree, fed the observed
// update stream, converges to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS test: blocking filesystem calls need parking allowed.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record entry ids before any renames, to check they survive.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits, to replay remotely.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            Box::new(CollabRemoteWorktreeClient(project.read(cx).client())),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survived the renames (including the directory move b/c -> d).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files' new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while the deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3154
// Renaming a directory must preserve the entry ids of the directory and its
// children, and leave buffers open under that directory unchanged (not dirty).
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture ids before the rename.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the file kept their original entry ids, and the
    // open buffer was not dirtied by the rename.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3206
3207#[gpui::test]
3208async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3209 init_test(cx);
3210
3211 let fs = FakeFs::new(cx.executor());
3212 fs.insert_tree(
3213 "/dir",
3214 json!({
3215 "a.txt": "a-contents",
3216 "b.txt": "b-contents",
3217 }),
3218 )
3219 .await;
3220
3221 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3222
3223 // Spawn multiple tasks to open paths, repeating some paths.
3224 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3225 (
3226 p.open_local_buffer("/dir/a.txt", cx),
3227 p.open_local_buffer("/dir/b.txt", cx),
3228 p.open_local_buffer("/dir/a.txt", cx),
3229 )
3230 });
3231
3232 let buffer_a_1 = buffer_a_1.await.unwrap();
3233 let buffer_a_2 = buffer_a_2.await.unwrap();
3234 let buffer_b = buffer_b.await.unwrap();
3235 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3236 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3237
3238 // There is only one buffer per path.
3239 let buffer_a_id = buffer_a_1.entity_id();
3240 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3241
3242 // Open the same path again while it is still open.
3243 drop(buffer_a_1);
3244 let buffer_a_3 = project
3245 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3246 .await
3247 .unwrap();
3248
3249 // There's still only one buffer per path.
3250 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3251}
3252
// Covers the full lifecycle of a buffer's dirty flag and the events it emits:
// edit -> dirty, save -> clean, re-edit -> dirty, restore-to-saved-text ->
// clean again; plus file deletion, which dirties a clean buffer but emits no
// extra DirtyChanged for an already-dirty one.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Accumulates every non-Operation event the buffer emits.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits flips the dirty flag, so
        // there is a single DirtyChanged between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3393
// Verifies buffer behavior when the backing file changes on disk: a clean
// buffer is reloaded in place (its contents patched via a diff so existing
// anchors survive), while a dirty buffer keeps its local edits and is
// marked as conflicted instead of being reloaded.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor just after the first character of each of the three lines.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors were shifted by the diff-based reload rather than
        // being invalidated.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3474
// Verifies line-ending handling: buffer text is normalized to "\n"
// internally while the detected `LineEnding` is tracked separately, the
// detected ending updates when the file changes on disk, and it is
// reapplied to the bytes written when the buffer is saved.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file is normalized to "\n" in memory, but the Windows line
    // ending is remembered.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3536
// Verifies that `update_diagnostics` turns LSP `related_information` entries
// into hint diagnostics grouped with their primary diagnostic: entries in a
// group share a `group_id`, `diagnostics_in_range` yields every entry in
// position order, and `diagnostic_group` yields one group's entries at a time.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate a server publishing two primaries (a warning and an error),
    // each accompanied by HINT-severity diagnostics that mirror the
    // primaries' `related_information` entries.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries in position order. "error 1" and its hint form group 1;
    // "error 2" and its two hints form group 0. Exactly one entry per group
    // has `is_primary: true`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3778
3779#[gpui::test]
3780async fn test_rename(cx: &mut gpui::TestAppContext) {
3781 // hi
3782 init_test(cx);
3783
3784 let fs = FakeFs::new(cx.executor());
3785 fs.insert_tree(
3786 "/dir",
3787 json!({
3788 "one.rs": "const ONE: usize = 1;",
3789 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3790 }),
3791 )
3792 .await;
3793
3794 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3795
3796 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3797 language_registry.add(rust_lang());
3798 let mut fake_servers = language_registry.register_fake_lsp_adapter(
3799 "Rust",
3800 FakeLspAdapter {
3801 capabilities: lsp::ServerCapabilities {
3802 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3803 prepare_provider: Some(true),
3804 work_done_progress_options: Default::default(),
3805 })),
3806 ..Default::default()
3807 },
3808 ..Default::default()
3809 },
3810 );
3811
3812 let buffer = project
3813 .update(cx, |project, cx| {
3814 project.open_local_buffer("/dir/one.rs", cx)
3815 })
3816 .await
3817 .unwrap();
3818
3819 let fake_server = fake_servers.next().await.unwrap();
3820
3821 let response = project.update(cx, |project, cx| {
3822 project.prepare_rename(buffer.clone(), 7, cx)
3823 });
3824 fake_server
3825 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3826 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3827 assert_eq!(params.position, lsp::Position::new(0, 7));
3828 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3829 lsp::Position::new(0, 6),
3830 lsp::Position::new(0, 9),
3831 ))))
3832 })
3833 .next()
3834 .await
3835 .unwrap();
3836 let range = response.await.unwrap().unwrap();
3837 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3838 assert_eq!(range, 6..9);
3839
3840 let response = project.update(cx, |project, cx| {
3841 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3842 });
3843 fake_server
3844 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3845 assert_eq!(
3846 params.text_document_position.text_document.uri.as_str(),
3847 "file:///dir/one.rs"
3848 );
3849 assert_eq!(
3850 params.text_document_position.position,
3851 lsp::Position::new(0, 7)
3852 );
3853 assert_eq!(params.new_name, "THREE");
3854 Ok(Some(lsp::WorkspaceEdit {
3855 changes: Some(
3856 [
3857 (
3858 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3859 vec![lsp::TextEdit::new(
3860 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3861 "THREE".to_string(),
3862 )],
3863 ),
3864 (
3865 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3866 vec![
3867 lsp::TextEdit::new(
3868 lsp::Range::new(
3869 lsp::Position::new(0, 24),
3870 lsp::Position::new(0, 27),
3871 ),
3872 "THREE".to_string(),
3873 ),
3874 lsp::TextEdit::new(
3875 lsp::Range::new(
3876 lsp::Position::new(0, 35),
3877 lsp::Position::new(0, 38),
3878 ),
3879 "THREE".to_string(),
3880 ),
3881 ],
3882 ),
3883 ]
3884 .into_iter()
3885 .collect(),
3886 ),
3887 ..Default::default()
3888 }))
3889 })
3890 .next()
3891 .await
3892 .unwrap();
3893 let mut transaction = response.await.unwrap().0;
3894 assert_eq!(transaction.len(), 2);
3895 assert_eq!(
3896 transaction
3897 .remove_entry(&buffer)
3898 .unwrap()
3899 .0
3900 .update(cx, |buffer, _| buffer.text()),
3901 "const THREE: usize = 1;"
3902 );
3903 assert_eq!(
3904 transaction
3905 .into_keys()
3906 .next()
3907 .unwrap()
3908 .update(cx, |buffer, _| buffer.text()),
3909 "const TWO: usize = one::THREE + one::THREE;"
3910 );
3911}
3912
// Verifies project-wide text search: match byte ranges are reported per file,
// and unsaved in-memory buffer edits are searched (four.rs only matches after
// it is edited, without ever being saved to disk).
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory so that it now contains "TWO" twice; the file
    // on disk is left unchanged.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now also reports the matches in the dirty buffer.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
3985
3986#[gpui::test]
3987async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3988 init_test(cx);
3989
3990 let search_query = "file";
3991
3992 let fs = FakeFs::new(cx.executor());
3993 fs.insert_tree(
3994 "/dir",
3995 json!({
3996 "one.rs": r#"// Rust file one"#,
3997 "one.ts": r#"// TypeScript file one"#,
3998 "two.rs": r#"// Rust file two"#,
3999 "two.ts": r#"// TypeScript file two"#,
4000 }),
4001 )
4002 .await;
4003 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4004
4005 assert!(
4006 search(
4007 &project,
4008 SearchQuery::text(
4009 search_query,
4010 false,
4011 true,
4012 false,
4013 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4014 Default::default()
4015 )
4016 .unwrap(),
4017 cx
4018 )
4019 .await
4020 .unwrap()
4021 .is_empty(),
4022 "If no inclusions match, no files should be returned"
4023 );
4024
4025 assert_eq!(
4026 search(
4027 &project,
4028 SearchQuery::text(
4029 search_query,
4030 false,
4031 true,
4032 false,
4033 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4034 Default::default()
4035 )
4036 .unwrap(),
4037 cx
4038 )
4039 .await
4040 .unwrap(),
4041 HashMap::from_iter([
4042 ("dir/one.rs".to_string(), vec![8..12]),
4043 ("dir/two.rs".to_string(), vec![8..12]),
4044 ]),
4045 "Rust only search should give only Rust files"
4046 );
4047
4048 assert_eq!(
4049 search(
4050 &project,
4051 SearchQuery::text(
4052 search_query,
4053 false,
4054 true,
4055 false,
4056
4057 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4058
4059 Default::default(),
4060 ).unwrap(),
4061 cx
4062 )
4063 .await
4064 .unwrap(),
4065 HashMap::from_iter([
4066 ("dir/one.ts".to_string(), vec![14..18]),
4067 ("dir/two.ts".to_string(), vec![14..18]),
4068 ]),
4069 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4070 );
4071
4072 assert_eq!(
4073 search(
4074 &project,
4075 SearchQuery::text(
4076 search_query,
4077 false,
4078 true,
4079 false,
4080
4081 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4082
4083 Default::default(),
4084 ).unwrap(),
4085 cx
4086 )
4087 .await
4088 .unwrap(),
4089 HashMap::from_iter([
4090 ("dir/two.ts".to_string(), vec![14..18]),
4091 ("dir/one.rs".to_string(), vec![8..12]),
4092 ("dir/one.ts".to_string(), vec![14..18]),
4093 ("dir/two.rs".to_string(), vec![8..12]),
4094 ]),
4095 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4096 );
4097}
4098
4099#[gpui::test]
4100async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4101 init_test(cx);
4102
4103 let search_query = "file";
4104
4105 let fs = FakeFs::new(cx.executor());
4106 fs.insert_tree(
4107 "/dir",
4108 json!({
4109 "one.rs": r#"// Rust file one"#,
4110 "one.ts": r#"// TypeScript file one"#,
4111 "two.rs": r#"// Rust file two"#,
4112 "two.ts": r#"// TypeScript file two"#,
4113 }),
4114 )
4115 .await;
4116 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4117
4118 assert_eq!(
4119 search(
4120 &project,
4121 SearchQuery::text(
4122 search_query,
4123 false,
4124 true,
4125 false,
4126 Default::default(),
4127 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4128 )
4129 .unwrap(),
4130 cx
4131 )
4132 .await
4133 .unwrap(),
4134 HashMap::from_iter([
4135 ("dir/one.rs".to_string(), vec![8..12]),
4136 ("dir/one.ts".to_string(), vec![14..18]),
4137 ("dir/two.rs".to_string(), vec![8..12]),
4138 ("dir/two.ts".to_string(), vec![14..18]),
4139 ]),
4140 "If no exclusions match, all files should be returned"
4141 );
4142
4143 assert_eq!(
4144 search(
4145 &project,
4146 SearchQuery::text(
4147 search_query,
4148 false,
4149 true,
4150 false,
4151 Default::default(),
4152 PathMatcher::new(&["*.rs".to_owned()]).unwrap()
4153 )
4154 .unwrap(),
4155 cx
4156 )
4157 .await
4158 .unwrap(),
4159 HashMap::from_iter([
4160 ("dir/one.ts".to_string(), vec![14..18]),
4161 ("dir/two.ts".to_string(), vec![14..18]),
4162 ]),
4163 "Rust exclusion search should give only TypeScript files"
4164 );
4165
4166 assert_eq!(
4167 search(
4168 &project,
4169 SearchQuery::text(
4170 search_query,
4171 false,
4172 true,
4173 false,
4174 Default::default(),
4175
4176 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4177
4178 ).unwrap(),
4179 cx
4180 )
4181 .await
4182 .unwrap(),
4183 HashMap::from_iter([
4184 ("dir/one.rs".to_string(), vec![8..12]),
4185 ("dir/two.rs".to_string(), vec![8..12]),
4186 ]),
4187 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4188 );
4189
4190 assert!(
4191 search(
4192 &project,
4193 SearchQuery::text(
4194 search_query,
4195 false,
4196 true,
4197 false,
4198 Default::default(),
4199
4200 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4201
4202 ).unwrap(),
4203 cx
4204 )
4205 .await
4206 .unwrap().is_empty(),
4207 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4208 );
4209}
4210
4211#[gpui::test]
4212async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4213 init_test(cx);
4214
4215 let search_query = "file";
4216
4217 let fs = FakeFs::new(cx.executor());
4218 fs.insert_tree(
4219 "/dir",
4220 json!({
4221 "one.rs": r#"// Rust file one"#,
4222 "one.ts": r#"// TypeScript file one"#,
4223 "two.rs": r#"// Rust file two"#,
4224 "two.ts": r#"// TypeScript file two"#,
4225 }),
4226 )
4227 .await;
4228 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4229
4230 assert!(
4231 search(
4232 &project,
4233 SearchQuery::text(
4234 search_query,
4235 false,
4236 true,
4237 false,
4238 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4239 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4240 )
4241 .unwrap(),
4242 cx
4243 )
4244 .await
4245 .unwrap()
4246 .is_empty(),
4247 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4248 );
4249
4250 assert!(
4251 search(
4252 &project,
4253 SearchQuery::text(
4254 search_query,
4255 false,
4256 true,
4257 false,
4258 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4259 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4260 ).unwrap(),
4261 cx
4262 )
4263 .await
4264 .unwrap()
4265 .is_empty(),
4266 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4267 );
4268
4269 assert!(
4270 search(
4271 &project,
4272 SearchQuery::text(
4273 search_query,
4274 false,
4275 true,
4276 false,
4277 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4278 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4279 )
4280 .unwrap(),
4281 cx
4282 )
4283 .await
4284 .unwrap()
4285 .is_empty(),
4286 "Non-matching inclusions and exclusions should not change that."
4287 );
4288
4289 assert_eq!(
4290 search(
4291 &project,
4292 SearchQuery::text(
4293 search_query,
4294 false,
4295 true,
4296 false,
4297 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4298 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4299 )
4300 .unwrap(),
4301 cx
4302 )
4303 .await
4304 .unwrap(),
4305 HashMap::from_iter([
4306 ("dir/one.ts".to_string(), vec![14..18]),
4307 ("dir/two.ts".to_string(), vec![14..18]),
4308 ]),
4309 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4310 );
4311}
4312
// Verifies inclusion globs in a multi-worktree project: a glob rooted at a
// worktree name (e.g. "worktree-a/*.rs") scopes results to that worktree,
// while an un-rooted glob (e.g. "*.ts") matches across all worktrees.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // A single project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4404
// Verifies search behavior with respect to .gitignore: by default ignored
// directories (target/, node_modules/) are skipped; flipping the fourth
// boolean argument of `SearchQuery::text` to `true` searches ignored files
// too, and inclusion/exclusion globs still apply on top of that.
// NOTE(review): a fresh `Project` is created before each query — presumably
// to avoid reusing worktree scan state between queries; confirm.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions can target a specific ignored directory, and exclusions
    // still filter within it.
    let files_to_include = PathMatcher::new(&["/dir/node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4518
// Verifies `glob_literal_prefix`: it returns the literal path prefix of a
// glob — everything before the first component containing a wildcard or
// brace — and the whole pattern when it contains no glob syntax.
#[test]
fn test_glob_literal_prefix() {
    // (glob pattern, expected literal prefix)
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected) in cases {
        assert_eq!(glob_literal_prefix(glob), expected);
    }
}
4526
// Verifies path validation in `create_entry` and `open_buffer`: a literal
// file name like "b.." is allowed, but any path with a ".." traversal
// component — whether it escapes the worktree or not — is rejected.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legitimate file name (".." is only rejected as a whole
    // path component), so creation succeeds.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was added; the rejected paths left the filesystem untouched.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4596
// Opens a tsx buffer served by four fake language servers — three advertising
// hover capabilities (one of which answers `None`) and one advertising no
// hover support — and checks that `Project::hover` queries exactly the capable
// servers and aggregates only the non-empty responses.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Primary adapter for tsx (second argument `true`), with hover support.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Secondary adapters (second argument `false`): Tailwind and ESLint also
    // advertise hover support; the last one advertises none.
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts all registered servers for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install one hover handler per server that is expected to be queried;
    // the no-capabilities server gets a handler that panics if it is ever
    // called. Also asserts each server name is seen only once.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two return a real hover payload echoing their name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Queried (it has hover capabilities) but answers with nothing.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be queried: it advertised no hover support.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover request, then wait until every capable server has
    // actually received it before inspecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content appear; ESLint's `None` and
    // the uncalled server contribute nothing.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4750
4751#[gpui::test]
4752async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4753 init_test(cx);
4754
4755 let fs = FakeFs::new(cx.executor());
4756 fs.insert_tree(
4757 "/dir",
4758 json!({
4759 "a.ts": "a",
4760 }),
4761 )
4762 .await;
4763
4764 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4765
4766 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4767 language_registry.add(typescript_lang());
4768 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4769 "TypeScript",
4770 FakeLspAdapter {
4771 capabilities: lsp::ServerCapabilities {
4772 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4773 ..lsp::ServerCapabilities::default()
4774 },
4775 ..FakeLspAdapter::default()
4776 },
4777 );
4778
4779 let buffer = project
4780 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4781 .await
4782 .unwrap();
4783 cx.executor().run_until_parked();
4784
4785 let fake_server = fake_language_servers
4786 .next()
4787 .await
4788 .expect("failed to get the language server");
4789
4790 let mut request_handled =
4791 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4792 Ok(Some(lsp::Hover {
4793 contents: lsp::HoverContents::Array(vec![
4794 lsp::MarkedString::String("".to_string()),
4795 lsp::MarkedString::String(" ".to_string()),
4796 lsp::MarkedString::String("\n\n\n".to_string()),
4797 ]),
4798 range: None,
4799 }))
4800 });
4801
4802 let hover_task = project.update(cx, |project, cx| {
4803 project.hover(&buffer, Point::new(0, 0), cx)
4804 });
4805 let () = request_handled
4806 .next()
4807 .await
4808 .expect("All hover requests should have been triggered");
4809 assert_eq!(
4810 Vec::<String>::new(),
4811 hover_task
4812 .await
4813 .into_iter()
4814 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4815 .sorted()
4816 .collect::<Vec<_>>(),
4817 "Empty hover parts should be ignored"
4818 );
4819}
4820
4821#[gpui::test]
4822async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4823 init_test(cx);
4824
4825 let fs = FakeFs::new(cx.executor());
4826 fs.insert_tree(
4827 "/dir",
4828 json!({
4829 "a.tsx": "a",
4830 }),
4831 )
4832 .await;
4833
4834 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4835
4836 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4837 language_registry.add(tsx_lang());
4838 let language_server_names = [
4839 "TypeScriptServer",
4840 "TailwindServer",
4841 "ESLintServer",
4842 "NoActionsCapabilitiesServer",
4843 ];
4844 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4845 "tsx",
4846 true,
4847 FakeLspAdapter {
4848 name: &language_server_names[0],
4849 capabilities: lsp::ServerCapabilities {
4850 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4851 ..lsp::ServerCapabilities::default()
4852 },
4853 ..FakeLspAdapter::default()
4854 },
4855 );
4856 let _a = language_registry.register_specific_fake_lsp_adapter(
4857 "tsx",
4858 false,
4859 FakeLspAdapter {
4860 name: &language_server_names[1],
4861 capabilities: lsp::ServerCapabilities {
4862 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4863 ..lsp::ServerCapabilities::default()
4864 },
4865 ..FakeLspAdapter::default()
4866 },
4867 );
4868 let _b = language_registry.register_specific_fake_lsp_adapter(
4869 "tsx",
4870 false,
4871 FakeLspAdapter {
4872 name: &language_server_names[2],
4873 capabilities: lsp::ServerCapabilities {
4874 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4875 ..lsp::ServerCapabilities::default()
4876 },
4877 ..FakeLspAdapter::default()
4878 },
4879 );
4880 let _c = language_registry.register_specific_fake_lsp_adapter(
4881 "tsx",
4882 false,
4883 FakeLspAdapter {
4884 name: &language_server_names[3],
4885 capabilities: lsp::ServerCapabilities {
4886 code_action_provider: None,
4887 ..lsp::ServerCapabilities::default()
4888 },
4889 ..FakeLspAdapter::default()
4890 },
4891 );
4892
4893 let buffer = project
4894 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4895 .await
4896 .unwrap();
4897 cx.executor().run_until_parked();
4898
4899 let mut servers_with_actions_requests = HashMap::default();
4900 for i in 0..language_server_names.len() {
4901 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4902 panic!(
4903 "Failed to get language server #{i} with name {}",
4904 &language_server_names[i]
4905 )
4906 });
4907 let new_server_name = new_server.server.name();
4908 assert!(
4909 !servers_with_actions_requests.contains_key(new_server_name),
4910 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4911 );
4912 let new_server_name = new_server_name.to_string();
4913 match new_server_name.as_str() {
4914 "TailwindServer" | "TypeScriptServer" => {
4915 servers_with_actions_requests.insert(
4916 new_server_name.clone(),
4917 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4918 move |_, _| {
4919 let name = new_server_name.clone();
4920 async move {
4921 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4922 lsp::CodeAction {
4923 title: format!("{name} code action"),
4924 ..lsp::CodeAction::default()
4925 },
4926 )]))
4927 }
4928 },
4929 ),
4930 );
4931 }
4932 "ESLintServer" => {
4933 servers_with_actions_requests.insert(
4934 new_server_name,
4935 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4936 |_, _| async move { Ok(None) },
4937 ),
4938 );
4939 }
4940 "NoActionsCapabilitiesServer" => {
4941 let _never_handled = new_server
4942 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4943 panic!(
4944 "Should not call for code actions server with no corresponding capabilities"
4945 )
4946 });
4947 }
4948 unexpected => panic!("Unexpected server name: {unexpected}"),
4949 }
4950 }
4951
4952 let code_actions_task = project.update(cx, |project, cx| {
4953 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4954 });
4955 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4956 |mut code_actions_request| async move {
4957 code_actions_request
4958 .next()
4959 .await
4960 .expect("All code actions requests should have been triggered")
4961 },
4962 ))
4963 .await;
4964 assert_eq!(
4965 vec!["TailwindServer code action", "TypeScriptServer code action"],
4966 code_actions_task
4967 .await
4968 .into_iter()
4969 .map(|code_action| code_action.lsp_action.title)
4970 .sorted()
4971 .collect::<Vec<_>>(),
4972 "Should receive code actions responses from all related servers with hover capabilities"
4973 );
4974}
4975
4976#[gpui::test]
4977async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
4978 init_test(cx);
4979
4980 let fs = FakeFs::new(cx.executor());
4981 fs.insert_tree(
4982 "/dir",
4983 json!({
4984 "a.rs": "let a = 1;",
4985 "b.rs": "let b = 2;",
4986 "c.rs": "let c = 2;",
4987 }),
4988 )
4989 .await;
4990
4991 let project = Project::test(
4992 fs,
4993 [
4994 "/dir/a.rs".as_ref(),
4995 "/dir/b.rs".as_ref(),
4996 "/dir/c.rs".as_ref(),
4997 ],
4998 cx,
4999 )
5000 .await;
5001
5002 // check the initial state and get the worktrees
5003 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5004 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5005 assert_eq!(worktrees.len(), 3);
5006
5007 let worktree_a = worktrees[0].read(cx);
5008 let worktree_b = worktrees[1].read(cx);
5009 let worktree_c = worktrees[2].read(cx);
5010
5011 // check they start in the right order
5012 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5013 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5014 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5015
5016 (
5017 worktrees[0].clone(),
5018 worktrees[1].clone(),
5019 worktrees[2].clone(),
5020 )
5021 });
5022
5023 // move first worktree to after the second
5024 // [a, b, c] -> [b, a, c]
5025 project
5026 .update(cx, |project, cx| {
5027 let first = worktree_a.read(cx);
5028 let second = worktree_b.read(cx);
5029 project.move_worktree(first.id(), second.id(), cx)
5030 })
5031 .expect("moving first after second");
5032
5033 // check the state after moving
5034 project.update(cx, |project, cx| {
5035 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5036 assert_eq!(worktrees.len(), 3);
5037
5038 let first = worktrees[0].read(cx);
5039 let second = worktrees[1].read(cx);
5040 let third = worktrees[2].read(cx);
5041
5042 // check they are now in the right order
5043 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5044 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5045 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5046 });
5047
5048 // move the second worktree to before the first
5049 // [b, a, c] -> [a, b, c]
5050 project
5051 .update(cx, |project, cx| {
5052 let second = worktree_a.read(cx);
5053 let first = worktree_b.read(cx);
5054 project.move_worktree(first.id(), second.id(), cx)
5055 })
5056 .expect("moving second before first");
5057
5058 // check the state after moving
5059 project.update(cx, |project, cx| {
5060 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5061 assert_eq!(worktrees.len(), 3);
5062
5063 let first = worktrees[0].read(cx);
5064 let second = worktrees[1].read(cx);
5065 let third = worktrees[2].read(cx);
5066
5067 // check they are now in the right order
5068 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5069 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5070 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5071 });
5072
5073 // move the second worktree to after the third
5074 // [a, b, c] -> [a, c, b]
5075 project
5076 .update(cx, |project, cx| {
5077 let second = worktree_b.read(cx);
5078 let third = worktree_c.read(cx);
5079 project.move_worktree(second.id(), third.id(), cx)
5080 })
5081 .expect("moving second after third");
5082
5083 // check the state after moving
5084 project.update(cx, |project, cx| {
5085 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5086 assert_eq!(worktrees.len(), 3);
5087
5088 let first = worktrees[0].read(cx);
5089 let second = worktrees[1].read(cx);
5090 let third = worktrees[2].read(cx);
5091
5092 // check they are now in the right order
5093 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5094 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5095 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5096 });
5097
5098 // move the third worktree to before the second
5099 // [a, c, b] -> [a, b, c]
5100 project
5101 .update(cx, |project, cx| {
5102 let third = worktree_c.read(cx);
5103 let second = worktree_b.read(cx);
5104 project.move_worktree(third.id(), second.id(), cx)
5105 })
5106 .expect("moving third before second");
5107
5108 // check the state after moving
5109 project.update(cx, |project, cx| {
5110 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5111 assert_eq!(worktrees.len(), 3);
5112
5113 let first = worktrees[0].read(cx);
5114 let second = worktrees[1].read(cx);
5115 let third = worktrees[2].read(cx);
5116
5117 // check they are now in the right order
5118 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5119 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5120 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5121 });
5122
5123 // move the first worktree to after the third
5124 // [a, b, c] -> [b, c, a]
5125 project
5126 .update(cx, |project, cx| {
5127 let first = worktree_a.read(cx);
5128 let third = worktree_c.read(cx);
5129 project.move_worktree(first.id(), third.id(), cx)
5130 })
5131 .expect("moving first after third");
5132
5133 // check the state after moving
5134 project.update(cx, |project, cx| {
5135 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5136 assert_eq!(worktrees.len(), 3);
5137
5138 let first = worktrees[0].read(cx);
5139 let second = worktrees[1].read(cx);
5140 let third = worktrees[2].read(cx);
5141
5142 // check they are now in the right order
5143 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5144 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5145 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5146 });
5147
5148 // move the third worktree to before the first
5149 // [b, c, a] -> [a, b, c]
5150 project
5151 .update(cx, |project, cx| {
5152 let third = worktree_a.read(cx);
5153 let first = worktree_b.read(cx);
5154 project.move_worktree(third.id(), first.id(), cx)
5155 })
5156 .expect("moving third before first");
5157
5158 // check the state after moving
5159 project.update(cx, |project, cx| {
5160 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5161 assert_eq!(worktrees.len(), 3);
5162
5163 let first = worktrees[0].read(cx);
5164 let second = worktrees[1].read(cx);
5165 let third = worktrees[2].read(cx);
5166
5167 // check they are now in the right order
5168 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5169 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5170 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5171 });
5172}
5173
5174async fn search(
5175 project: &Model<Project>,
5176 query: SearchQuery,
5177 cx: &mut gpui::TestAppContext,
5178) -> Result<HashMap<String, Vec<Range<usize>>>> {
5179 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5180 let mut results = HashMap::default();
5181 while let Some(search_result) = search_rx.next().await {
5182 match search_result {
5183 SearchResult::Buffer { buffer, ranges } => {
5184 results.entry(buffer).or_insert(ranges);
5185 }
5186 SearchResult::LimitReached => {}
5187 }
5188 }
5189 Ok(results
5190 .into_iter()
5191 .map(|(buffer, ranges)| {
5192 buffer.update(cx, |buffer, cx| {
5193 let path = buffer
5194 .file()
5195 .unwrap()
5196 .full_path(cx)
5197 .to_string_lossy()
5198 .to_string();
5199 let ranges = ranges
5200 .into_iter()
5201 .map(|range| range.to_offset(buffer))
5202 .collect::<Vec<_>>();
5203 (path, ranges)
5204 })
5205 })
5206 .collect())
5207}
5208
5209fn init_test(cx: &mut gpui::TestAppContext) {
5210 if std::env::var("RUST_LOG").is_ok() {
5211 env_logger::try_init().ok();
5212 }
5213
5214 cx.update(|cx| {
5215 let settings_store = SettingsStore::test(cx);
5216 cx.set_global(settings_store);
5217 release_channel::init(SemanticVersion::default(), cx);
5218 language::init(cx);
5219 Project::init_settings(cx);
5220 });
5221}
5222
5223fn json_lang() -> Arc<Language> {
5224 Arc::new(Language::new(
5225 LanguageConfig {
5226 name: "JSON".into(),
5227 matcher: LanguageMatcher {
5228 path_suffixes: vec!["json".to_string()],
5229 ..Default::default()
5230 },
5231 ..Default::default()
5232 },
5233 None,
5234 ))
5235}
5236
5237fn js_lang() -> Arc<Language> {
5238 Arc::new(Language::new(
5239 LanguageConfig {
5240 name: Arc::from("JavaScript"),
5241 matcher: LanguageMatcher {
5242 path_suffixes: vec!["js".to_string()],
5243 ..Default::default()
5244 },
5245 ..Default::default()
5246 },
5247 None,
5248 ))
5249}
5250
5251fn rust_lang() -> Arc<Language> {
5252 Arc::new(Language::new(
5253 LanguageConfig {
5254 name: "Rust".into(),
5255 matcher: LanguageMatcher {
5256 path_suffixes: vec!["rs".to_string()],
5257 ..Default::default()
5258 },
5259 ..Default::default()
5260 },
5261 Some(tree_sitter_rust::language()),
5262 ))
5263}
5264
5265fn typescript_lang() -> Arc<Language> {
5266 Arc::new(Language::new(
5267 LanguageConfig {
5268 name: "TypeScript".into(),
5269 matcher: LanguageMatcher {
5270 path_suffixes: vec!["ts".to_string()],
5271 ..Default::default()
5272 },
5273 ..Default::default()
5274 },
5275 Some(tree_sitter_typescript::language_typescript()),
5276 ))
5277}
5278
5279fn tsx_lang() -> Arc<Language> {
5280 Arc::new(Language::new(
5281 LanguageConfig {
5282 name: "tsx".into(),
5283 matcher: LanguageMatcher {
5284 path_suffixes: vec!["tsx".to_string()],
5285 ..Default::default()
5286 },
5287 ..Default::default()
5288 },
5289 Some(tree_sitter_typescript::language_tsx()),
5290 ))
5291}
5292
5293fn get_all_tasks(
5294 project: &Model<Project>,
5295 worktree_id: Option<WorktreeId>,
5296 task_context: &TaskContext,
5297 cx: &mut AppContext,
5298) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5299 let resolved_tasks = project.update(cx, |project, cx| {
5300 project
5301 .task_inventory()
5302 .read(cx)
5303 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5304 });
5305
5306 cx.spawn(|_| async move {
5307 let (mut old, new) = resolved_tasks.await;
5308 old.extend(new);
5309 old
5310 })
5311}