1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17use worktree::WorktreeModelHandle as _;
18
19#[gpui::test]
20async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
21 cx.executor().allow_parking();
22
23 let (tx, mut rx) = futures::channel::mpsc::unbounded();
24 let _thread = std::thread::spawn(move || {
25 std::fs::metadata("/Users").unwrap();
26 std::thread::sleep(Duration::from_millis(1000));
27 tx.unbounded_send(1).unwrap();
28 });
29 rx.next().await.unwrap();
30}
31
32#[gpui::test]
33async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
34 cx.executor().allow_parking();
35
36 let io_task = smol::unblock(move || {
37 println!("sleeping on thread {:?}", std::thread::current().id());
38 std::thread::sleep(Duration::from_millis(10));
39 1
40 });
41
42 let task = cx.foreground_executor().spawn(async move {
43 io_task.await;
44 });
45
46 task.await;
47}
48
// Verifies that a project opened through a symlinked root resolves paths and
// inodes correctly, including a symlink that aliases a directory inside the
// tree. Gated to unix because it uses `std::os::unix::fs::symlink`.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (temp_tree + RealFs), so blocking IO must be
    // allowed to park the test executor.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // `root_link` points at the tree root from outside; `finnochio` aliases
    // the existing `fennel` directory from inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root rather than the real path.
    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees().next().unwrap().read(cx);
        // 5 files — presumably apple, date, endive, grape, plus grape again
        // through the `finnochio` symlink; TODO confirm the worktree counts
        // symlinked entries separately.
        assert_eq!(tree.file_count(), 5);
        // Both paths should resolve to the same underlying inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
89
90#[gpui::test]
91async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
92 init_test(cx);
93
94 let fs = FakeFs::new(cx.executor());
95 fs.insert_tree(
96 "/the-root",
97 json!({
98 ".zed": {
99 "settings.json": r#"{ "tab_size": 8 }"#,
100 "tasks.json": r#"[{
101 "label": "cargo check",
102 "command": "cargo",
103 "args": ["check", "--all"]
104 },]"#,
105 },
106 "a": {
107 "a.rs": "fn a() {\n A\n}"
108 },
109 "b": {
110 ".zed": {
111 "settings.json": r#"{ "tab_size": 2 }"#,
112 "tasks.json": r#"[{
113 "label": "cargo check",
114 "command": "cargo",
115 "args": ["check"]
116 },]"#,
117 },
118 "b.rs": "fn b() {\n B\n}"
119 }
120 }),
121 )
122 .await;
123
124 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
125 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
126
127 cx.executor().run_until_parked();
128 cx.update(|cx| {
129 let tree = worktree.read(cx);
130
131 let settings_a = language_settings(
132 None,
133 Some(
134 &(File::for_entry(
135 tree.entry_for_path("a/a.rs").unwrap().clone(),
136 worktree.clone(),
137 ) as _),
138 ),
139 cx,
140 );
141 let settings_b = language_settings(
142 None,
143 Some(
144 &(File::for_entry(
145 tree.entry_for_path("b/b.rs").unwrap().clone(),
146 worktree.clone(),
147 ) as _),
148 ),
149 cx,
150 );
151
152 assert_eq!(settings_a.tab_size.get(), 8);
153 assert_eq!(settings_b.tab_size.get(), 2);
154
155 let workree_id = project.update(cx, |project, cx| {
156 project.worktrees().next().unwrap().read(cx).id()
157 });
158 let all_tasks = project
159 .update(cx, |project, cx| {
160 project.task_inventory().update(cx, |inventory, cx| {
161 inventory.list_tasks(None, None, false, cx)
162 })
163 })
164 .into_iter()
165 .map(|(source_kind, task)| (source_kind, task.name().to_string()))
166 .collect::<Vec<_>>();
167 assert_eq!(
168 all_tasks,
169 vec![
170 (
171 TaskSourceKind::Worktree {
172 id: workree_id,
173 abs_path: PathBuf::from("/the-root/.zed/tasks.json")
174 },
175 "cargo check".to_string()
176 ),
177 (
178 TaskSourceKind::Worktree {
179 id: workree_id,
180 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json")
181 },
182 "cargo check".to_string()
183 ),
184 ]
185 );
186 });
187}
188
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are (re)configured from server
// capabilities, edits/saves/renames are routed only to the matching servers,
// and restarts shut down and reopen documents on fresh server instances.
// NOTE: this test relies on the exact order of LSP notifications.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server advertising `.` and `::` completion
    // triggers, and a fake JSON server advertising `:`.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        // No language assigned yet — the registry is still empty.
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // The TOML edit produces no notification; the next message received by the
    // Rust server is the change for test2.rs.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // Within one server, the rename appears as close-then-open.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Attach a diagnostic to the renamed buffer so we can verify below that it
    // is cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements
    // come up.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
559
// Verifies `workspace/didChangeWatchedFiles` support: gitignored paths are not
// scanned until a server registers a watcher over them, and subsequent FS
// mutations are reported to the server only when they match its glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                // `target` is gitignored and not yet expanded.
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by the registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob over src, and a
    // recursive glob inside the gitignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate every change notification, sorted by URI so assertions are
    // order-independent.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone must not emit change events; it should only
    // trigger the directory reads needed to expand `target/y`.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations — three of which match the watched
    // patterns, and two of which do not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
753
754#[gpui::test]
755async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
756 init_test(cx);
757
758 let fs = FakeFs::new(cx.executor());
759 fs.insert_tree(
760 "/dir",
761 json!({
762 "a.rs": "let a = 1;",
763 "b.rs": "let b = 2;"
764 }),
765 )
766 .await;
767
768 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
769
770 let buffer_a = project
771 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
772 .await
773 .unwrap();
774 let buffer_b = project
775 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
776 .await
777 .unwrap();
778
779 project.update(cx, |project, cx| {
780 project
781 .update_diagnostics(
782 LanguageServerId(0),
783 lsp::PublishDiagnosticsParams {
784 uri: Url::from_file_path("/dir/a.rs").unwrap(),
785 version: None,
786 diagnostics: vec![lsp::Diagnostic {
787 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
788 severity: Some(lsp::DiagnosticSeverity::ERROR),
789 message: "error 1".to_string(),
790 ..Default::default()
791 }],
792 },
793 &[],
794 cx,
795 )
796 .unwrap();
797 project
798 .update_diagnostics(
799 LanguageServerId(0),
800 lsp::PublishDiagnosticsParams {
801 uri: Url::from_file_path("/dir/b.rs").unwrap(),
802 version: None,
803 diagnostics: vec![lsp::Diagnostic {
804 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
805 severity: Some(lsp::DiagnosticSeverity::WARNING),
806 message: "error 2".to_string(),
807 ..Default::default()
808 }],
809 },
810 &[],
811 cx,
812 )
813 .unwrap();
814 });
815
816 buffer_a.update(cx, |buffer, _| {
817 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
818 assert_eq!(
819 chunks
820 .iter()
821 .map(|(s, d)| (s.as_str(), *d))
822 .collect::<Vec<_>>(),
823 &[
824 ("let ", None),
825 ("a", Some(DiagnosticSeverity::ERROR)),
826 (" = 1;", None),
827 ]
828 );
829 });
830 buffer_b.update(cx, |buffer, _| {
831 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
832 assert_eq!(
833 chunks
834 .iter()
835 .map(|(s, d)| (s.as_str(), *d))
836 .collect::<Vec<_>>(),
837 &[
838 ("let ", None),
839 ("b", Some(DiagnosticSeverity::WARNING)),
840 (" = 2;", None),
841 ]
842 );
843 });
844}
845
846#[gpui::test]
847async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
848 init_test(cx);
849
850 let fs = FakeFs::new(cx.executor());
851 fs.insert_tree(
852 "/root",
853 json!({
854 "dir": {
855 ".git": {
856 "HEAD": "ref: refs/heads/main",
857 },
858 ".gitignore": "b.rs",
859 "a.rs": "let a = 1;",
860 "b.rs": "let b = 2;",
861 },
862 "other.rs": "let b = c;"
863 }),
864 )
865 .await;
866
867 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
868 let (worktree, _) = project
869 .update(cx, |project, cx| {
870 project.find_or_create_local_worktree("/root/dir", true, cx)
871 })
872 .await
873 .unwrap();
874 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
875
876 let (worktree, _) = project
877 .update(cx, |project, cx| {
878 project.find_or_create_local_worktree("/root/other.rs", false, cx)
879 })
880 .await
881 .unwrap();
882 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
883
884 let server_id = LanguageServerId(0);
885 project.update(cx, |project, cx| {
886 project
887 .update_diagnostics(
888 server_id,
889 lsp::PublishDiagnosticsParams {
890 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
891 version: None,
892 diagnostics: vec![lsp::Diagnostic {
893 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
894 severity: Some(lsp::DiagnosticSeverity::ERROR),
895 message: "unused variable 'b'".to_string(),
896 ..Default::default()
897 }],
898 },
899 &[],
900 cx,
901 )
902 .unwrap();
903 project
904 .update_diagnostics(
905 server_id,
906 lsp::PublishDiagnosticsParams {
907 uri: Url::from_file_path("/root/other.rs").unwrap(),
908 version: None,
909 diagnostics: vec![lsp::Diagnostic {
910 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
911 severity: Some(lsp::DiagnosticSeverity::ERROR),
912 message: "unknown variable 'c'".to_string(),
913 ..Default::default()
914 }],
915 },
916 &[],
917 cx,
918 )
919 .unwrap();
920 });
921
922 let main_ignored_buffer = project
923 .update(cx, |project, cx| {
924 project.open_buffer((main_worktree_id, "b.rs"), cx)
925 })
926 .await
927 .unwrap();
928 main_ignored_buffer.update(cx, |buffer, _| {
929 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
930 assert_eq!(
931 chunks
932 .iter()
933 .map(|(s, d)| (s.as_str(), *d))
934 .collect::<Vec<_>>(),
935 &[
936 ("let ", None),
937 ("b", Some(DiagnosticSeverity::ERROR)),
938 (" = 2;", None),
939 ],
940 "Gigitnored buffers should still get in-buffer diagnostics",
941 );
942 });
943 let other_buffer = project
944 .update(cx, |project, cx| {
945 project.open_buffer((other_worktree_id, ""), cx)
946 })
947 .await
948 .unwrap();
949 other_buffer.update(cx, |buffer, _| {
950 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
951 assert_eq!(
952 chunks
953 .iter()
954 .map(|(s, d)| (s.as_str(), *d))
955 .collect::<Vec<_>>(),
956 &[
957 ("let b = ", None),
958 ("c", Some(DiagnosticSeverity::ERROR)),
959 (";", None),
960 ],
961 "Buffers from hidden projects should still get in-buffer diagnostics"
962 );
963 });
964
965 project.update(cx, |project, cx| {
966 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
967 assert_eq!(
968 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
969 vec![(
970 ProjectPath {
971 worktree_id: main_worktree_id,
972 path: Arc::from(Path::new("b.rs")),
973 },
974 server_id,
975 DiagnosticSummary {
976 error_count: 1,
977 warning_count: 0,
978 }
979 )]
980 );
981 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
982 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
983 });
984}
985
// Verifies that disk-based diagnostic progress tokens drive the
// Started/Updated/Finished event sequence, and that publishing empty
// diagnostics twice emits only a single update event.
// NOTE: this test asserts an exact event ordering.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter declares `progress_token` as its disk-based-diagnostics
    // token, so progress under that token maps to DiskBasedDiagnostics events.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Progress under the disk-based token surfaces as a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress under the token yields the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is present in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publication must not emit another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1114
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in a
// "diagnostics running" state: the replacement server's progress fully
// supersedes the abandoned one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets the next id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be listed as running diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // `[T; 0]` trick: an empty, correctly-typed array for comparison.
            [LanguageServerId(0); 0]
        );
    });
}
1193
// Diagnostics published by a language server must be cleared from the buffer
// and from the project-level summary when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then verify it landed in the buffer
    // and in the project's diagnostic summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1273
// A server reporting diagnostics with a bogus (unknown) buffer version must
// not poison the buffer's version tracking: after a restart, the new server
// should receive the document at version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The restarted server re-opens the document; its version must start at 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1312
// Toggling `enable_language_server` per language in the user settings must
// start/stop exactly the matching server, leaving the other language's
// server untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake adapter per language so each server's lifecycle can be
    // observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A second Rust server instance comes up and re-opens the open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1426
// Diagnostics published against an *older* buffer version must be translated
// through the edits made since that version, so they land on the text they
// originally referred to. Also covers overlapping diagnostics and
// out-of-order publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shifted down by 2 because of the "\n\n" insertion above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the ERROR severity wins for
        // the overlapping chunk.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1706
// Zero-width diagnostic ranges must still be rendered: they get expanded to
// cover an adjacent character so they are visible in the buffer chunks.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly, bypassing any language server.
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1775
// Diagnostics reported for the same path by two different language servers
// must be counted independently in the project-wide summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        // Same path, same range — but from server 0 ...
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ... and from server 1.
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both errors are retained: one per server.
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
1831
// LSP edits computed against a *past* document version must be remapped
// through the buffer edits made since that version before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the version the server saw when the document was opened.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The positions in these edits are relative to `lsp_document_version`,
    // i.e. the buffer as it was *before* the edits above.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must preserve the interleaved user edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
1984
// A huge delete-and-reinsert diff from the server (as rust-analyzer sends
// for merge-imports) must be minimized into the small edits it really is.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits collapse into exactly two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2092
// Malformed server edits — unsorted, with inverted ranges, or pointing past
// the end of the document — must be normalized rather than rejected.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending past the end of the document (line 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Normalized to the same two minimal edits as the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2196
2197fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2198 buffer: &Buffer,
2199 range: Range<T>,
2200) -> Vec<(String, Option<DiagnosticSeverity>)> {
2201 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2202 for chunk in buffer.snapshot().chunks(range, true) {
2203 if chunks.last().map_or(false, |prev_chunk| {
2204 prev_chunk.1 == chunk.diagnostic_severity
2205 }) {
2206 chunks.last_mut().unwrap().0.push_str(chunk.text);
2207 } else {
2208 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2209 }
2210 }
2211 chunks
2212}
2213
// Go-to-definition into a file outside the project must open that file in an
// invisible worktree, which is released once the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs appears as an *invisible* worktree while the definition is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2308
#[gpui::test]
// When the language server returns completion items without a text edit, the
// client must infer both the replacement text and the range to replace.
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of the partial word "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The item carries an `insert_text` but no text edit, so the client must
    // compute the range to replace on its own.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range falls back to the partial word before the cursor
    // ("fqn", 3 bytes).
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Request completions just before the closing quote of a string literal.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    // This item has no `insert_text` either, so the label itself is used as
    // the new text.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The fallback range covers the word before the cursor ("cmp", 3 bytes),
    // ending just inside the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2400
#[gpui::test]
// Completion text received from the server may contain "\r" or "\r\n" line
// endings; both must be normalized to "\n" before insertion into the buffer.
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server's `insert_text` mixes a bare "\r" and a "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both kinds of carriage returns were normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2461
#[gpui::test(iterations = 10)]
// A code action whose resolved form carries a *command* (rather than edits)
// must be applied by executing that command; any `workspace/applyEdit`
// request the server issues while the command runs must be captured into the
// returned project transaction.
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved via `codeAction/resolve`
                        // before being applied.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    // Insert "X" at the very start of a.ts.
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit is undoable through the buffer.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2594
2595#[gpui::test(iterations = 10)]
2596async fn test_save_file(cx: &mut gpui::TestAppContext) {
2597 init_test(cx);
2598
2599 let fs = FakeFs::new(cx.executor());
2600 fs.insert_tree(
2601 "/dir",
2602 json!({
2603 "file1": "the old contents",
2604 }),
2605 )
2606 .await;
2607
2608 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2609 let buffer = project
2610 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2611 .await
2612 .unwrap();
2613 buffer.update(cx, |buffer, cx| {
2614 assert_eq!(buffer.text(), "the old contents");
2615 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2616 });
2617
2618 project
2619 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2620 .await
2621 .unwrap();
2622
2623 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2624 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2625}
2626
2627#[gpui::test(iterations = 30)]
2628async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2629 init_test(cx);
2630
2631 let fs = FakeFs::new(cx.executor().clone());
2632 fs.insert_tree(
2633 "/dir",
2634 json!({
2635 "file1": "the original contents",
2636 }),
2637 )
2638 .await;
2639
2640 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2641 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2642 let buffer = project
2643 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2644 .await
2645 .unwrap();
2646
2647 // Simulate buffer diffs being slow, so that they don't complete before
2648 // the next file change occurs.
2649 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2650
2651 // Change the buffer's file on disk, and then wait for the file change
2652 // to be detected by the worktree, so that the buffer starts reloading.
2653 fs.save(
2654 "/dir/file1".as_ref(),
2655 &"the first contents".into(),
2656 Default::default(),
2657 )
2658 .await
2659 .unwrap();
2660 worktree.next_event(cx);
2661
2662 // Change the buffer's file again. Depending on the random seed, the
2663 // previous file change may still be in progress.
2664 fs.save(
2665 "/dir/file1".as_ref(),
2666 &"the second contents".into(),
2667 Default::default(),
2668 )
2669 .await
2670 .unwrap();
2671 worktree.next_event(cx);
2672
2673 cx.executor().run_until_parked();
2674 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2675 buffer.read_with(cx, |buffer, _| {
2676 assert_eq!(buffer.text(), on_disk_text);
2677 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2678 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2679 });
2680}
2681
2682#[gpui::test(iterations = 30)]
2683async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2684 init_test(cx);
2685
2686 let fs = FakeFs::new(cx.executor().clone());
2687 fs.insert_tree(
2688 "/dir",
2689 json!({
2690 "file1": "the original contents",
2691 }),
2692 )
2693 .await;
2694
2695 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2696 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2697 let buffer = project
2698 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2699 .await
2700 .unwrap();
2701
2702 // Simulate buffer diffs being slow, so that they don't complete before
2703 // the next file change occurs.
2704 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2705
2706 // Change the buffer's file on disk, and then wait for the file change
2707 // to be detected by the worktree, so that the buffer starts reloading.
2708 fs.save(
2709 "/dir/file1".as_ref(),
2710 &"the first contents".into(),
2711 Default::default(),
2712 )
2713 .await
2714 .unwrap();
2715 worktree.next_event(cx);
2716
2717 cx.executor()
2718 .spawn(cx.executor().simulate_random_delay())
2719 .await;
2720
2721 // Perform a noop edit, causing the buffer's version to increase.
2722 buffer.update(cx, |buffer, cx| {
2723 buffer.edit([(0..0, " ")], None, cx);
2724 buffer.undo(cx);
2725 });
2726
2727 cx.executor().run_until_parked();
2728 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2729 buffer.read_with(cx, |buffer, _| {
2730 let buffer_text = buffer.text();
2731 if buffer_text == on_disk_text {
2732 assert!(
2733 !buffer.is_dirty() && !buffer.has_conflict(),
2734 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2735 );
2736 }
2737 // If the file change occurred while the buffer was processing the first
2738 // change, the buffer will be in a conflicting state.
2739 else {
2740 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2741 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2742 }
2743 });
2744}
2745
2746#[gpui::test]
2747async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2748 init_test(cx);
2749
2750 let fs = FakeFs::new(cx.executor());
2751 fs.insert_tree(
2752 "/dir",
2753 json!({
2754 "file1": "the old contents",
2755 }),
2756 )
2757 .await;
2758
2759 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2760 let buffer = project
2761 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2762 .await
2763 .unwrap();
2764 buffer.update(cx, |buffer, cx| {
2765 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2766 });
2767
2768 project
2769 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2770 .await
2771 .unwrap();
2772
2773 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2774 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2775}
2776
2777#[gpui::test]
2778async fn test_save_as(cx: &mut gpui::TestAppContext) {
2779 init_test(cx);
2780
2781 let fs = FakeFs::new(cx.executor());
2782 fs.insert_tree("/dir", json!({})).await;
2783
2784 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2785
2786 let languages = project.update(cx, |project, _| project.languages().clone());
2787 languages.add(rust_lang());
2788
2789 let buffer = project.update(cx, |project, cx| {
2790 project.create_buffer("", None, cx).unwrap()
2791 });
2792 buffer.update(cx, |buffer, cx| {
2793 buffer.edit([(0..0, "abc")], None, cx);
2794 assert!(buffer.is_dirty());
2795 assert!(!buffer.has_conflict());
2796 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2797 });
2798 project
2799 .update(cx, |project, cx| {
2800 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2801 })
2802 .await
2803 .unwrap();
2804 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2805
2806 cx.executor().run_until_parked();
2807 buffer.update(cx, |buffer, cx| {
2808 assert_eq!(
2809 buffer.file().unwrap().full_path(cx),
2810 Path::new("dir/file1.rs")
2811 );
2812 assert!(!buffer.is_dirty());
2813 assert!(!buffer.has_conflict());
2814 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2815 });
2816
2817 let opened_buffer = project
2818 .update(cx, |project, cx| {
2819 project.open_local_buffer("/dir/file1.rs", cx)
2820 })
2821 .await
2822 .unwrap();
2823 assert_eq!(opened_buffer, buffer);
2824}
2825
#[gpui::test(retries = 5)]
// After renaming/deleting files on the real filesystem, the local worktree
// rescans (keeping entry ids and open buffers consistent), and replaying its
// update stream brings a remote worktree replica to the same state.
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.update(cx, |p, _| p.client.clone());

    // Opens a buffer for the given worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable entry id for the given worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Record every update message the local worktree emits, so it can be
    // replayed against the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames, including renames of ancestor
    // directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the buffer for the
    // removed file keeps its old path but is flagged as deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2974
#[gpui::test(iterations = 10)]
// Renaming a directory must preserve the entry ids of the directory and the
// files inside it, and must not dirty buffers open on those files.
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the stable entry id for the given worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory from "a" to "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survived the rename, and the open buffer stayed clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3025
3026#[gpui::test]
3027async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3028 init_test(cx);
3029
3030 let fs = FakeFs::new(cx.executor());
3031 fs.insert_tree(
3032 "/dir",
3033 json!({
3034 "a.txt": "a-contents",
3035 "b.txt": "b-contents",
3036 }),
3037 )
3038 .await;
3039
3040 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3041
3042 // Spawn multiple tasks to open paths, repeating some paths.
3043 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3044 (
3045 p.open_local_buffer("/dir/a.txt", cx),
3046 p.open_local_buffer("/dir/b.txt", cx),
3047 p.open_local_buffer("/dir/a.txt", cx),
3048 )
3049 });
3050
3051 let buffer_a_1 = buffer_a_1.await.unwrap();
3052 let buffer_a_2 = buffer_a_2.await.unwrap();
3053 let buffer_b = buffer_b.await.unwrap();
3054 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3055 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3056
3057 // There is only one buffer per path.
3058 let buffer_a_id = buffer_a_1.entity_id();
3059 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3060
3061 // Open the same path again while it is still open.
3062 drop(buffer_a_1);
3063 let buffer_a_3 = project
3064 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3065 .await
3066 .unwrap();
3067
3068 // There's still only one buffer per path.
3069 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3070}
3071
3072#[gpui::test]
3073async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3074 init_test(cx);
3075
3076 let fs = FakeFs::new(cx.executor());
3077 fs.insert_tree(
3078 "/dir",
3079 json!({
3080 "file1": "abc",
3081 "file2": "def",
3082 "file3": "ghi",
3083 }),
3084 )
3085 .await;
3086
3087 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3088
3089 let buffer1 = project
3090 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3091 .await
3092 .unwrap();
3093 let events = Arc::new(Mutex::new(Vec::new()));
3094
3095 // initially, the buffer isn't dirty.
3096 buffer1.update(cx, |buffer, cx| {
3097 cx.subscribe(&buffer1, {
3098 let events = events.clone();
3099 move |_, _, event, _| match event {
3100 BufferEvent::Operation(_) => {}
3101 _ => events.lock().push(event.clone()),
3102 }
3103 })
3104 .detach();
3105
3106 assert!(!buffer.is_dirty());
3107 assert!(events.lock().is_empty());
3108
3109 buffer.edit([(1..2, "")], None, cx);
3110 });
3111
3112 // after the first edit, the buffer is dirty, and emits a dirtied event.
3113 buffer1.update(cx, |buffer, cx| {
3114 assert!(buffer.text() == "ac");
3115 assert!(buffer.is_dirty());
3116 assert_eq!(
3117 *events.lock(),
3118 &[language::Event::Edited, language::Event::DirtyChanged]
3119 );
3120 events.lock().clear();
3121 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
3122 });
3123
3124 // after saving, the buffer is not dirty, and emits a saved event.
3125 buffer1.update(cx, |buffer, cx| {
3126 assert!(!buffer.is_dirty());
3127 assert_eq!(*events.lock(), &[language::Event::Saved]);
3128 events.lock().clear();
3129
3130 buffer.edit([(1..1, "B")], None, cx);
3131 buffer.edit([(2..2, "D")], None, cx);
3132 });
3133
3134 // after editing again, the buffer is dirty, and emits another dirty event.
3135 buffer1.update(cx, |buffer, cx| {
3136 assert!(buffer.text() == "aBDc");
3137 assert!(buffer.is_dirty());
3138 assert_eq!(
3139 *events.lock(),
3140 &[
3141 language::Event::Edited,
3142 language::Event::DirtyChanged,
3143 language::Event::Edited,
3144 ],
3145 );
3146 events.lock().clear();
3147
3148 // After restoring the buffer to its previously-saved state,
3149 // the buffer is not considered dirty anymore.
3150 buffer.edit([(1..3, "")], None, cx);
3151 assert!(buffer.text() == "ac");
3152 assert!(!buffer.is_dirty());
3153 });
3154
3155 assert_eq!(
3156 *events.lock(),
3157 &[language::Event::Edited, language::Event::DirtyChanged]
3158 );
3159
3160 // When a file is deleted, the buffer is considered dirty.
3161 let events = Arc::new(Mutex::new(Vec::new()));
3162 let buffer2 = project
3163 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3164 .await
3165 .unwrap();
3166 buffer2.update(cx, |_, cx| {
3167 cx.subscribe(&buffer2, {
3168 let events = events.clone();
3169 move |_, _, event, _| events.lock().push(event.clone())
3170 })
3171 .detach();
3172 });
3173
3174 fs.remove_file("/dir/file2".as_ref(), Default::default())
3175 .await
3176 .unwrap();
3177 cx.executor().run_until_parked();
3178 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3179 assert_eq!(
3180 *events.lock(),
3181 &[
3182 language::Event::DirtyChanged,
3183 language::Event::FileHandleChanged
3184 ]
3185 );
3186
3187 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3188 let events = Arc::new(Mutex::new(Vec::new()));
3189 let buffer3 = project
3190 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3191 .await
3192 .unwrap();
3193 buffer3.update(cx, |_, cx| {
3194 cx.subscribe(&buffer3, {
3195 let events = events.clone();
3196 move |_, _, event, _| events.lock().push(event.clone())
3197 })
3198 .detach();
3199 });
3200
3201 buffer3.update(cx, |buffer, cx| {
3202 buffer.edit([(0..0, "x")], None, cx);
3203 });
3204 events.lock().clear();
3205 fs.remove_file("/dir/file3".as_ref(), Default::default())
3206 .await
3207 .unwrap();
3208 cx.executor().run_until_parked();
3209 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3210 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3211}
3212
#[gpui::test]
// A clean buffer is reloaded (via a minimal diff that preserves anchors) when
// its file changes on disk; a dirty buffer is instead marked as conflicted.
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // observe how anchors move when the buffer reloads from disk.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The surviving lines' anchors moved with their lines; the anchor on
        // the removed line ("c") resolves to the end of the edited region.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3293
// Buffers should present Unix ('\n') line endings in memory regardless of the
// on-disk style, remember the detected on-disk style, pick up line-ending
// changes made on disk, and reapply the remembered style when saving.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    // The CRLF file's text is normalized to '\n' in memory; the detected
    // on-disk ending style is tracked separately on each buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    // Let the file-change notification propagate to the buffer.
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    // The in-memory '\n' text is converted back to CRLF on disk.
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3355
// Diagnostics published in a single LSP message should be grouped: hint
// diagnostics that mirror a primary diagnostic's `related_information` are
// folded into the primary's group (sharing its `group_id`), and
// `diagnostic_group` returns every member of one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary warning ("error 1") with one related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary error ("error 2") with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint for "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint for "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position. Hints carry the `group_id` of
    // their primary diagnostic rather than forming groups of their own.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3597
// Exercises prepare-rename and perform-rename against a fake LSP server:
// prepare returns the renameable symbol's range, and performing the rename
// yields a transaction containing the edited buffers of both files.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support (with prepare) so the project issues
    // prepare/perform rename requests to this server.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Kick off prepare-rename at offset 7 (inside "ONE"), then service the
    // resulting LSP request with the symbol's range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    // The returned range covers "ONE" (offsets 6..9 of `one.rs`).
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server replies with a workspace edit that
    // touches both `one.rs` and `two.rs`.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction should contain exactly the two edited buffers, with the
    // edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3730
// Project-wide text search should report matches from files on disk, and
// should reflect unsaved edits in open buffers rather than the on-disk text.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // "TWO" appears in two.rs (declaration) and three.rs (reference).
    assert_eq!(
        search(
            &project,
            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Introduce two occurrences of "TWO" in four.rs via unsaved buffer edits.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now picks up the dirty in-memory buffer contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
3787
// Search with `files_to_include` filters: an inclusion list that matches
// nothing yields no results, and multiple inclusion globs are OR'ed together
// (a file matching any one of them is searched).
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.ts".to_string(), vec![14..18]),
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
3903
// Search with `files_to_exclude` filters: non-matching exclusions have no
// effect, and multiple exclusion globs are OR'ed together (a file matching
// any one of them is skipped).
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
4018
// When inclusion and exclusion filters are combined, exclusions win: a file
// matching both lists is skipped. Disjoint inclusion/exclusion lists behave
// as expected (included-but-not-excluded files are searched).
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            ).unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
4132
// With multiple worktrees, inclusion globs can be anchored to a worktree by
// prefixing its root name (e.g. "worktree-a/*.rs"), while un-prefixed globs
// like "*.ts" match files in every worktree.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // A single project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4224
// Searching gitignored directories: by default ignored files are skipped;
// with the include-ignored flag set they are searched, and inclusion /
// exclusion globs still apply to the ignored files.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is constructed for each search —
    // presumably to avoid state from the previous search affecting results;
    // confirm before relying on it.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
    let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4322
// `glob_literal_prefix` should return the longest leading path portion of a
// glob pattern that contains no glob metacharacters (`*`, `{`, …); a pattern
// with no metacharacters is returned whole.
#[test]
fn test_glob_literal_prefix() {
    assert_eq!(glob_literal_prefix("**/*.js"), "");
    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
}
4330
// `create_entry` should accept unusual-but-valid names like "b..", but reject
// paths that escape the worktree or contain `..` components; `open_buffer`
// must reject `..` paths as well.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Only "/one/two/three" is part of the project; "/one/two" is outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name (not a parent-directory reference).
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the "b.." entry was created; the rejected paths left no trace.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4399
4400async fn search(
4401 project: &Model<Project>,
4402 query: SearchQuery,
4403 cx: &mut gpui::TestAppContext,
4404) -> Result<HashMap<String, Vec<Range<usize>>>> {
4405 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4406 let mut results = HashMap::default();
4407 while let Some(search_result) = search_rx.next().await {
4408 match search_result {
4409 SearchResult::Buffer { buffer, ranges } => {
4410 results.entry(buffer).or_insert(ranges);
4411 }
4412 SearchResult::LimitReached => {}
4413 }
4414 }
4415 Ok(results
4416 .into_iter()
4417 .map(|(buffer, ranges)| {
4418 buffer.update(cx, |buffer, cx| {
4419 let path = buffer
4420 .file()
4421 .unwrap()
4422 .full_path(cx)
4423 .to_string_lossy()
4424 .to_string();
4425 let ranges = ranges
4426 .into_iter()
4427 .map(|range| range.to_offset(buffer))
4428 .collect::<Vec<_>>();
4429 (path, ranges)
4430 })
4431 })
4432 .collect())
4433}
4434
4435fn init_test(cx: &mut gpui::TestAppContext) {
4436 if std::env::var("RUST_LOG").is_ok() {
4437 env_logger::try_init().ok();
4438 }
4439
4440 cx.update(|cx| {
4441 let settings_store = SettingsStore::test(cx);
4442 cx.set_global(settings_store);
4443 release_channel::init("0.0.0", cx);
4444 language::init(cx);
4445 Project::init_settings(cx);
4446 });
4447}
4448
4449fn json_lang() -> Arc<Language> {
4450 Arc::new(Language::new(
4451 LanguageConfig {
4452 name: "JSON".into(),
4453 matcher: LanguageMatcher {
4454 path_suffixes: vec!["json".to_string()],
4455 ..Default::default()
4456 },
4457 ..Default::default()
4458 },
4459 None,
4460 ))
4461}
4462
4463fn js_lang() -> Arc<Language> {
4464 Arc::new(Language::new(
4465 LanguageConfig {
4466 name: Arc::from("JavaScript"),
4467 matcher: LanguageMatcher {
4468 path_suffixes: vec!["js".to_string()],
4469 ..Default::default()
4470 },
4471 ..Default::default()
4472 },
4473 None,
4474 ))
4475}
4476
4477fn rust_lang() -> Arc<Language> {
4478 Arc::new(Language::new(
4479 LanguageConfig {
4480 name: "Rust".into(),
4481 matcher: LanguageMatcher {
4482 path_suffixes: vec!["rs".to_string()],
4483 ..Default::default()
4484 },
4485 ..Default::default()
4486 },
4487 Some(tree_sitter_rust::language()),
4488 ))
4489}
4490
4491fn typescript_lang() -> Arc<Language> {
4492 Arc::new(Language::new(
4493 LanguageConfig {
4494 name: "TypeScript".into(),
4495 matcher: LanguageMatcher {
4496 path_suffixes: vec!["ts".to_string()],
4497 ..Default::default()
4498 },
4499 ..Default::default()
4500 },
4501 Some(tree_sitter_typescript::language_typescript()),
4502 ))
4503}