1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
48#[cfg(not(windows))]
49#[gpui::test]
50async fn test_symlinks(cx: &mut gpui::TestAppContext) {
51 init_test(cx);
52 cx.executor().allow_parking();
53
54 let dir = temp_tree(json!({
55 "root": {
56 "apple": "",
57 "banana": {
58 "carrot": {
59 "date": "",
60 "endive": "",
61 }
62 },
63 "fennel": {
64 "grape": "",
65 }
66 }
67 }));
68
69 let root_link_path = dir.path().join("root_link");
70 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
71 os::unix::fs::symlink(
72 &dir.path().join("root/fennel"),
73 &dir.path().join("root/finnochio"),
74 )
75 .unwrap();
76
77 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
78
79 project.update(cx, |project, cx| {
80 let tree = project.worktrees().next().unwrap().read(cx);
81 assert_eq!(tree.file_count(), 5);
82 assert_eq!(
83 tree.inode_for_path("fennel/grape"),
84 tree.inode_for_path("finnochio/grape")
85 );
86 });
87}
88
89#[gpui::test]
90async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
91 init_test(cx);
92
93 let fs = FakeFs::new(cx.executor());
94 fs.insert_tree(
95 "/the-root",
96 json!({
97 ".zed": {
98 "settings.json": r#"{ "tab_size": 8 }"#,
99 "tasks.json": r#"[{
100 "label": "cargo check",
101 "command": "cargo",
102 "args": ["check", "--all"]
103 },]"#,
104 },
105 "a": {
106 "a.rs": "fn a() {\n A\n}"
107 },
108 "b": {
109 ".zed": {
110 "settings.json": r#"{ "tab_size": 2 }"#,
111 "tasks.json": r#"[{
112 "label": "cargo check",
113 "command": "cargo",
114 "args": ["check"]
115 },]"#,
116 },
117 "b.rs": "fn b() {\n B\n}"
118 }
119 }),
120 )
121 .await;
122
123 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
124 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
125
126 cx.executor().run_until_parked();
127 cx.update(|cx| {
128 let tree = worktree.read(cx);
129
130 let settings_a = language_settings(
131 None,
132 Some(
133 &(File::for_entry(
134 tree.entry_for_path("a/a.rs").unwrap().clone(),
135 worktree.clone(),
136 ) as _),
137 ),
138 cx,
139 );
140 let settings_b = language_settings(
141 None,
142 Some(
143 &(File::for_entry(
144 tree.entry_for_path("b/b.rs").unwrap().clone(),
145 worktree.clone(),
146 ) as _),
147 ),
148 cx,
149 );
150
151 assert_eq!(settings_a.tab_size.get(), 8);
152 assert_eq!(settings_b.tab_size.get(), 2);
153
154 let workree_id = project.update(cx, |project, cx| {
155 project.worktrees().next().unwrap().read(cx).id()
156 });
157 let all_tasks = project
158 .update(cx, |project, cx| {
159 project.task_inventory().update(cx, |inventory, cx| {
160 inventory.list_tasks(None, None, false, cx)
161 })
162 })
163 .into_iter()
164 .map(|(source_kind, task)| (source_kind, task.name().to_string()))
165 .collect::<Vec<_>>();
166 assert_eq!(
167 all_tasks,
168 vec![
169 (
170 TaskSourceKind::Worktree {
171 id: workree_id,
172 abs_path: PathBuf::from("/the-root/.zed/tasks.json")
173 },
174 "cargo check".to_string()
175 ),
176 (
177 TaskSourceKind::Worktree {
178 id: workree_id,
179 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json")
180 },
181 "cargo check".to_string()
182 ),
183 ]
184 );
185 });
186}
187
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end test of language-server lifecycle management: servers start
    // lazily when matching buffers open, receive open/change/save/close
    // notifications only for buffers in their language, follow files across
    // renames (including language-changing renames), and are restarted with
    // their documents re-opened.
    init_test(cx);

    // Two languages with distinct fake servers; each server advertises
    // different completion trigger characters so we can observe which server
    // configured which buffer.
    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename shows up as a close of the old URI followed by an open of the new one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed a diagnostic on the buffer so we can later verify it is cleared
    // when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
579
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies the `workspace/didChangeWatchedFiles` flow: registering file
    // watchers causes ignored directories to be loaded, and subsequent FS
    // mutations are reported to the server only when they match a watcher glob.
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    // `target` is gitignored; only one of its subtrees will later be watched.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory scans triggered by watcher registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact file, a glob in `src`, and a recursive
    // glob inside the ignored `target/y` subtree.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate change notifications, sorted by URI for deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone should produce no change events, but should have
    // scanned the newly watched ignored directories.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
784
785#[gpui::test]
786async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
787 init_test(cx);
788
789 let fs = FakeFs::new(cx.executor());
790 fs.insert_tree(
791 "/dir",
792 json!({
793 "a.rs": "let a = 1;",
794 "b.rs": "let b = 2;"
795 }),
796 )
797 .await;
798
799 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
800
801 let buffer_a = project
802 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
803 .await
804 .unwrap();
805 let buffer_b = project
806 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
807 .await
808 .unwrap();
809
810 project.update(cx, |project, cx| {
811 project
812 .update_diagnostics(
813 LanguageServerId(0),
814 lsp::PublishDiagnosticsParams {
815 uri: Url::from_file_path("/dir/a.rs").unwrap(),
816 version: None,
817 diagnostics: vec![lsp::Diagnostic {
818 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
819 severity: Some(lsp::DiagnosticSeverity::ERROR),
820 message: "error 1".to_string(),
821 ..Default::default()
822 }],
823 },
824 &[],
825 cx,
826 )
827 .unwrap();
828 project
829 .update_diagnostics(
830 LanguageServerId(0),
831 lsp::PublishDiagnosticsParams {
832 uri: Url::from_file_path("/dir/b.rs").unwrap(),
833 version: None,
834 diagnostics: vec![lsp::Diagnostic {
835 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
836 severity: Some(lsp::DiagnosticSeverity::WARNING),
837 message: "error 2".to_string(),
838 ..Default::default()
839 }],
840 },
841 &[],
842 cx,
843 )
844 .unwrap();
845 });
846
847 buffer_a.update(cx, |buffer, _| {
848 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
849 assert_eq!(
850 chunks
851 .iter()
852 .map(|(s, d)| (s.as_str(), *d))
853 .collect::<Vec<_>>(),
854 &[
855 ("let ", None),
856 ("a", Some(DiagnosticSeverity::ERROR)),
857 (" = 1;", None),
858 ]
859 );
860 });
861 buffer_b.update(cx, |buffer, _| {
862 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
863 assert_eq!(
864 chunks
865 .iter()
866 .map(|(s, d)| (s.as_str(), *d))
867 .collect::<Vec<_>>(),
868 &[
869 ("let ", None),
870 ("b", Some(DiagnosticSeverity::WARNING)),
871 (" = 2;", None),
872 ]
873 );
874 });
875}
876
877#[gpui::test]
878async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
879 init_test(cx);
880
881 let fs = FakeFs::new(cx.executor());
882 fs.insert_tree(
883 "/root",
884 json!({
885 "dir": {
886 ".git": {
887 "HEAD": "ref: refs/heads/main",
888 },
889 ".gitignore": "b.rs",
890 "a.rs": "let a = 1;",
891 "b.rs": "let b = 2;",
892 },
893 "other.rs": "let b = c;"
894 }),
895 )
896 .await;
897
898 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
899 let (worktree, _) = project
900 .update(cx, |project, cx| {
901 project.find_or_create_local_worktree("/root/dir", true, cx)
902 })
903 .await
904 .unwrap();
905 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
906
907 let (worktree, _) = project
908 .update(cx, |project, cx| {
909 project.find_or_create_local_worktree("/root/other.rs", false, cx)
910 })
911 .await
912 .unwrap();
913 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
914
915 let server_id = LanguageServerId(0);
916 project.update(cx, |project, cx| {
917 project
918 .update_diagnostics(
919 server_id,
920 lsp::PublishDiagnosticsParams {
921 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
922 version: None,
923 diagnostics: vec![lsp::Diagnostic {
924 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
925 severity: Some(lsp::DiagnosticSeverity::ERROR),
926 message: "unused variable 'b'".to_string(),
927 ..Default::default()
928 }],
929 },
930 &[],
931 cx,
932 )
933 .unwrap();
934 project
935 .update_diagnostics(
936 server_id,
937 lsp::PublishDiagnosticsParams {
938 uri: Url::from_file_path("/root/other.rs").unwrap(),
939 version: None,
940 diagnostics: vec![lsp::Diagnostic {
941 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
942 severity: Some(lsp::DiagnosticSeverity::ERROR),
943 message: "unknown variable 'c'".to_string(),
944 ..Default::default()
945 }],
946 },
947 &[],
948 cx,
949 )
950 .unwrap();
951 });
952
953 let main_ignored_buffer = project
954 .update(cx, |project, cx| {
955 project.open_buffer((main_worktree_id, "b.rs"), cx)
956 })
957 .await
958 .unwrap();
959 main_ignored_buffer.update(cx, |buffer, _| {
960 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
961 assert_eq!(
962 chunks
963 .iter()
964 .map(|(s, d)| (s.as_str(), *d))
965 .collect::<Vec<_>>(),
966 &[
967 ("let ", None),
968 ("b", Some(DiagnosticSeverity::ERROR)),
969 (" = 2;", None),
970 ],
971 "Gigitnored buffers should still get in-buffer diagnostics",
972 );
973 });
974 let other_buffer = project
975 .update(cx, |project, cx| {
976 project.open_buffer((other_worktree_id, ""), cx)
977 })
978 .await
979 .unwrap();
980 other_buffer.update(cx, |buffer, _| {
981 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
982 assert_eq!(
983 chunks
984 .iter()
985 .map(|(s, d)| (s.as_str(), *d))
986 .collect::<Vec<_>>(),
987 &[
988 ("let b = ", None),
989 ("c", Some(DiagnosticSeverity::ERROR)),
990 (";", None),
991 ],
992 "Buffers from hidden projects should still get in-buffer diagnostics"
993 );
994 });
995
996 project.update(cx, |project, cx| {
997 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
998 assert_eq!(
999 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1000 vec![(
1001 ProjectPath {
1002 worktree_id: main_worktree_id,
1003 path: Arc::from(Path::new("b.rs")),
1004 },
1005 server_id,
1006 DiagnosticSummary {
1007 error_count: 1,
1008 warning_count: 0,
1009 }
1010 )]
1011 );
1012 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1013 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1014 });
1015}
1016
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event sequence driven by a server's disk-based
    // diagnostics progress token: Started/Updated/Finished events, plus the
    // rule that re-publishing identical empty diagnostics emits no new event.
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Observe project events from here on.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Progress beginning on the disk-based token maps to DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress maps to DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is present in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: the diagnostics did not change.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1152
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still in flight does not leave the project stuck in a
// "diagnostics running" state: the replacement server (id 1) is tracked
// instead, and ending its progress clears the running set even though the old
// server (id 0) never reported completion.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake adapter advertises for disk-based diagnostics;
    // the project uses it to recognize disk-based diagnostic progress reports.
    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    // Opening a .rs buffer triggers the first fake server's startup.
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    // Subscribe to project events only now, so we observe the replacement
    // server's lifecycle rather than the original server's.
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        // The restarted server gets a new id (1).
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should appear in the running-diagnostics set.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // `[LanguageServerId(0); 0]` is an empty array: no server should
            // still be reported as running disk-based diagnostics.
            [LanguageServerId(0); 0]
        );
    });
}
1237
// Verifies that diagnostics published by a language server are dropped when
// that server is restarted: both the buffer's diagnostic entries and the
// project-wide diagnostic summary must be cleared.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm the diagnostic landed in
    // the buffer and is counted in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1329
// Regression-style test: a server publishing diagnostics with a bogus buffer
// version (here 10000) before a restart must not corrupt the version that the
// replacement server receives — the restarted server should see the buffer
// re-opened at version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    // Give the project a chance to process the bogus notification.
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The new server re-opens the buffer; its reported version must restart at 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1381
// Verifies that the per-language `enable_language_server` setting starts and
// stops exactly the affected server: disabling Rust stops only the Rust
// server; re-enabling Rust while disabling JavaScript restarts the former and
// shuts down the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two independent languages, each with its own fake LSP adapter, so we can
    // observe that toggling one language's setting doesn't affect the other.
    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            matcher: LanguageMatcher {
                path_suffixes: vec!["js".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    // Each server should have been told about its own buffer (and only that one).
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is shut down via the LSP `exit` notification.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // Meanwhile the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1516
// Verifies how published LSP diagnostics are translated into buffer
// coordinates when the buffer has been edited since the version the server
// reported against: ranges are remapped through intervening edits, overlapping
// diagnostics chunk correctly, and out-of-order/older reports are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            // "disk" is treated as a disk-based diagnostics source, so
            // published diagnostics with that source get `is_disk_based: true`.
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        // Diagnostics are stated against the pre-edit document version, so
        // the project must translate them through the "\n\n" insertion.
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Row 0/1/2 diagnostics now live on rows 2/3/4 (two lines inserted).
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Chunked text should alternate between plain and error-highlighted runs.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query should clip the highlighted runs at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // Warning range fully contains the error range above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where error and warning overlap, the error severity wins; the
        // warning-only remainder is rendered as a warning chunk.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        // Note: the entries arrive with row 1 before row 0; they must still
        // be stored and returned in buffer order below.
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1805
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, and at end-of-line it is
// extended backward to cover the preceding character instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =  \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two empty-range diagnostics directly (no language server):
    // one mid-line (row 0, col 10) and one at end-of-line (row 1, col 10).
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                ("  ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1874
// Verifies that diagnostics reported by two different language servers for the
// same file are counted separately in the project-wide diagnostic summary
// (two errors total, not one overwriting the other).
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        // Same path, same range, but attributed to server 0...
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and to server 1.
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' diagnostics contribute to the summary.
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
1930
// Verifies that `Project::edits_from_lsp` correctly remaps edits that a
// language server computed against an OLDER document version: the buffer is
// edited after the server's snapshot, and the returned edits must apply
// cleanly to the current text while preserving the user's interleaved edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server "saw" at open time; the LSP edits below
    // will be declared against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Ask the project to interpret LSP edits stated against the old version.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must produce the server's intent merged
    // with the user's later comment insertions.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2091
// Verifies that `edits_from_lsp` minimizes a server-sent "delete everything
// and reinsert" style diff into the small set of real changes, so that a tiny
// logical edit (merging two imports) doesn't clobber the whole file.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge diff collapses to exactly two edits: rewrite the first use
        // statement and delete the now-redundant second one.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2199
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edits, an inverted range (end before start), and a range whose end is far
// past the last line must still resolve to a valid, minimal set of edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2303
2304fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2305 buffer: &Buffer,
2306 range: Range<T>,
2307) -> Vec<(String, Option<DiagnosticSeverity>)> {
2308 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2309 for chunk in buffer.snapshot().chunks(range, true) {
2310 if chunks.last().map_or(false, |prev_chunk| {
2311 prev_chunk.1 == chunk.diagnostic_severity
2312 }) {
2313 chunks.last_mut().unwrap().0.push_str(chunk.text);
2314 } else {
2315 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2316 }
2317 }
2318 chunks
2319}
2320
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Register a Rust language backed by a fake LSP adapter, so the test can
    // script the server's go-to-definition response below.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is in the project's worktree; a.rs exists on disk but is not
    // part of the project yet.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Script the fake server: a definition request at position (0, 22) in
    // b.rs resolves to a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        // The definition target is a.rs, opened via a new non-visible
        // worktree alongside the original visible one.
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the non-visible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2424
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Register a TypeScript language whose fake server advertises completion
    // support, so completion requests are routed to it.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of the buffer. The server's completion
    // item carries an `insert_text` but no text-edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    // The insert text is used as the new text, and the replaced range covers
    // the trailing "fqn" (the last 3 characters).
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Request completions inside a string literal; the item has neither an
    // edit range nor an insert text.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    // The label is used as the new text, and the replaced range covers only
    // "cmp", not the surrounding quote or slash.
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2524
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Register a TypeScript language whose fake server advertises completion
    // support.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server's insert text mixes bare "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    // Both carriage-return variants are normalized to "\n" in the
    // completion's new text.
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2593
// Verifies that applying a code action whose resolution yields no edits falls
// back to executing the action's command, and that edits the server pushes via
// `workspace/applyEdit` during that command end up in the returned transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A TypeScript language (no grammar needed) with a default fake adapter.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying the command).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Push an edit inserting "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2714
2715#[gpui::test(iterations = 10)]
2716async fn test_save_file(cx: &mut gpui::TestAppContext) {
2717 init_test(cx);
2718
2719 let fs = FakeFs::new(cx.executor());
2720 fs.insert_tree(
2721 "/dir",
2722 json!({
2723 "file1": "the old contents",
2724 }),
2725 )
2726 .await;
2727
2728 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2729 let buffer = project
2730 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2731 .await
2732 .unwrap();
2733 buffer.update(cx, |buffer, cx| {
2734 assert_eq!(buffer.text(), "the old contents");
2735 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2736 });
2737
2738 project
2739 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2740 .await
2741 .unwrap();
2742
2743 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2744 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2745}
2746
2747#[gpui::test(iterations = 30)]
2748async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2749 init_test(cx);
2750
2751 let fs = FakeFs::new(cx.executor().clone());
2752 fs.insert_tree(
2753 "/dir",
2754 json!({
2755 "file1": "the original contents",
2756 }),
2757 )
2758 .await;
2759
2760 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2761 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2762 let buffer = project
2763 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2764 .await
2765 .unwrap();
2766
2767 // Simulate buffer diffs being slow, so that they don't complete before
2768 // the next file change occurs.
2769 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2770
2771 // Change the buffer's file on disk, and then wait for the file change
2772 // to be detected by the worktree, so that the buffer starts reloading.
2773 fs.save(
2774 "/dir/file1".as_ref(),
2775 &"the first contents".into(),
2776 Default::default(),
2777 )
2778 .await
2779 .unwrap();
2780 worktree.next_event(cx);
2781
2782 // Change the buffer's file again. Depending on the random seed, the
2783 // previous file change may still be in progress.
2784 fs.save(
2785 "/dir/file1".as_ref(),
2786 &"the second contents".into(),
2787 Default::default(),
2788 )
2789 .await
2790 .unwrap();
2791 worktree.next_event(cx);
2792
2793 cx.executor().run_until_parked();
2794 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2795 buffer.read_with(cx, |buffer, _| {
2796 assert_eq!(buffer.text(), on_disk_text);
2797 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2798 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2799 });
2800}
2801
2802#[gpui::test(iterations = 30)]
2803async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2804 init_test(cx);
2805
2806 let fs = FakeFs::new(cx.executor().clone());
2807 fs.insert_tree(
2808 "/dir",
2809 json!({
2810 "file1": "the original contents",
2811 }),
2812 )
2813 .await;
2814
2815 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2816 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2817 let buffer = project
2818 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2819 .await
2820 .unwrap();
2821
2822 // Simulate buffer diffs being slow, so that they don't complete before
2823 // the next file change occurs.
2824 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2825
2826 // Change the buffer's file on disk, and then wait for the file change
2827 // to be detected by the worktree, so that the buffer starts reloading.
2828 fs.save(
2829 "/dir/file1".as_ref(),
2830 &"the first contents".into(),
2831 Default::default(),
2832 )
2833 .await
2834 .unwrap();
2835 worktree.next_event(cx);
2836
2837 cx.executor()
2838 .spawn(cx.executor().simulate_random_delay())
2839 .await;
2840
2841 // Perform a noop edit, causing the buffer's version to increase.
2842 buffer.update(cx, |buffer, cx| {
2843 buffer.edit([(0..0, " ")], None, cx);
2844 buffer.undo(cx);
2845 });
2846
2847 cx.executor().run_until_parked();
2848 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2849 buffer.read_with(cx, |buffer, _| {
2850 let buffer_text = buffer.text();
2851 if buffer_text == on_disk_text {
2852 assert!(
2853 !buffer.is_dirty() && !buffer.has_conflict(),
2854 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2855 );
2856 }
2857 // If the file change occurred while the buffer was processing the first
2858 // change, the buffer will be in a conflicting state.
2859 else {
2860 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2861 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2862 }
2863 });
2864}
2865
2866#[gpui::test]
2867async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2868 init_test(cx);
2869
2870 let fs = FakeFs::new(cx.executor());
2871 fs.insert_tree(
2872 "/dir",
2873 json!({
2874 "file1": "the old contents",
2875 }),
2876 )
2877 .await;
2878
2879 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2880 let buffer = project
2881 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2882 .await
2883 .unwrap();
2884 buffer.update(cx, |buffer, cx| {
2885 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2886 });
2887
2888 project
2889 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2890 .await
2891 .unwrap();
2892
2893 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2894 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2895}
2896
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a Rust language so that saving with an ".rs" extension can
    // re-detect the buffer's language.
    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.register_native_grammars([("rust", tree_sitter_rust::language())]);
    languages.register_test_language(LanguageConfig {
        name: "Rust".into(),
        grammar: Some("rust".into()),
        matcher: LanguageMatcher {
            path_suffixes: vec!["rs".into()],
            ..Default::default()
        },
        ..Default::default()
    });

    // Create an untitled buffer; with no file, it starts as Plain Text.
    let buffer = project.update(cx, |project, cx| {
        project.create_buffer("", None, cx).unwrap()
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    // Save the buffer under a new path and confirm the contents hit disk.
    project
        .update(cx, |project, cx| {
            project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After saving, the buffer gains a file, becomes clean, and picks up the
    // Rust language from its new ".rs" extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the newly-saved path yields the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
2954
// Exercises worktree rescanning after real filesystem renames/deletions, and
// checks that a remote worktree replaying the broadcast updates converges to
// the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.update(cx, |p, _| p.client.clone());

    // Opens a buffer for a path relative to the temp directory.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the worktree entry id for a path, panicking if it's missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Record every update the local worktree broadcasts, so they can be
    // replayed against the remote copy below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // reports its file as deleted while keeping the old path.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3103
// Renaming a directory must preserve the entry ids of the directory and the
// files inside it, and must not dirty buffers open on those files.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for a path, panicking if it's missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory from "a" to "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Both entry ids are unchanged and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3154
3155#[gpui::test]
3156async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3157 init_test(cx);
3158
3159 let fs = FakeFs::new(cx.executor());
3160 fs.insert_tree(
3161 "/dir",
3162 json!({
3163 "a.txt": "a-contents",
3164 "b.txt": "b-contents",
3165 }),
3166 )
3167 .await;
3168
3169 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3170
3171 // Spawn multiple tasks to open paths, repeating some paths.
3172 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3173 (
3174 p.open_local_buffer("/dir/a.txt", cx),
3175 p.open_local_buffer("/dir/b.txt", cx),
3176 p.open_local_buffer("/dir/a.txt", cx),
3177 )
3178 });
3179
3180 let buffer_a_1 = buffer_a_1.await.unwrap();
3181 let buffer_a_2 = buffer_a_2.await.unwrap();
3182 let buffer_b = buffer_b.await.unwrap();
3183 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3184 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3185
3186 // There is only one buffer per path.
3187 let buffer_a_id = buffer_a_1.entity_id();
3188 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3189
3190 // Open the same path again while it is still open.
3191 drop(buffer_a_1);
3192 let buffer_a_3 = project
3193 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3194 .await
3195 .unwrap();
3196
3197 // There's still only one buffer per path.
3198 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3199}
3200
// Walks a buffer through edits, a simulated save, an undo-equivalent edit,
// and on-disk deletion, asserting the exact dirty state and event sequence
// at each step.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all buffer events except operations.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Simulate a save by reporting the current version/fingerprint/mtime.
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer before the deletion.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3346
// A clean buffer reloads (and remaps its anchors) when its file changes on
// disk; a dirty buffer instead keeps its contents and reports a conflict.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // check how they move when the file is reloaded.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3427
3428#[gpui::test]
3429async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3430 init_test(cx);
3431
3432 let fs = FakeFs::new(cx.executor());
3433 fs.insert_tree(
3434 "/dir",
3435 json!({
3436 "file1": "a\nb\nc\n",
3437 "file2": "one\r\ntwo\r\nthree\r\n",
3438 }),
3439 )
3440 .await;
3441
3442 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3443 let buffer1 = project
3444 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3445 .await
3446 .unwrap();
3447 let buffer2 = project
3448 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3449 .await
3450 .unwrap();
3451
3452 buffer1.update(cx, |buffer, _| {
3453 assert_eq!(buffer.text(), "a\nb\nc\n");
3454 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3455 });
3456 buffer2.update(cx, |buffer, _| {
3457 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3458 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3459 });
3460
3461 // Change a file's line endings on disk from unix to windows. The buffer's
3462 // state updates correctly.
3463 fs.save(
3464 "/dir/file1".as_ref(),
3465 &"aaa\nb\nc\n".into(),
3466 LineEnding::Windows,
3467 )
3468 .await
3469 .unwrap();
3470 cx.executor().run_until_parked();
3471 buffer1.update(cx, |buffer, _| {
3472 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3473 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3474 });
3475
3476 // Save a file with windows line endings. The file is written correctly.
3477 buffer2.update(cx, |buffer, cx| {
3478 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3479 });
3480 project
3481 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3482 .await
3483 .unwrap();
3484 assert_eq!(
3485 fs.load("/dir/file2".as_ref()).await.unwrap(),
3486 "one\r\ntwo\r\nthree\r\nfour\r\n",
3487 );
3488}
3489
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics published for one buffer are grouped:
    // hint diagnostics that cross-reference a primary via
    // `related_information` end up sharing the primary's group id, and
    // `diagnostic_group` returns each group's entries together.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two logical groups:
    // "error 1" (warning + one hint) and "error 2" (error + two hints).
    // Each hint points back at its primary via related_information, and
    // each primary lists its hints.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position; the "error 2" cluster got
    // group_id 0 and the "error 1" cluster group_id 1, with `is_primary`
    // marking the non-hint diagnostic in each group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3731
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end rename flow against a fake LSP server:
    // `prepare_rename` resolves the renameable range, and `perform_rename`
    // applies a multi-file WorkspaceEdit as a transaction over buffers.
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Advertise rename support (including prepareRename) so the project
    // routes rename requests to the fake server.
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should yield the symbol's
    // range as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the fake server answers with edits in both files;
    // the handler also checks the request the project sent.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its applied edits;
    // verify both files' text, pulling out the already-open buffer first.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3872
3873#[gpui::test]
3874async fn test_search(cx: &mut gpui::TestAppContext) {
3875 init_test(cx);
3876
3877 let fs = FakeFs::new(cx.executor());
3878 fs.insert_tree(
3879 "/dir",
3880 json!({
3881 "one.rs": "const ONE: usize = 1;",
3882 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3883 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3884 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3885 }),
3886 )
3887 .await;
3888 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3889 assert_eq!(
3890 search(
3891 &project,
3892 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3893 cx
3894 )
3895 .await
3896 .unwrap(),
3897 HashMap::from_iter([
3898 ("two.rs".to_string(), vec![6..9]),
3899 ("three.rs".to_string(), vec![37..40])
3900 ])
3901 );
3902
3903 let buffer_4 = project
3904 .update(cx, |project, cx| {
3905 project.open_local_buffer("/dir/four.rs", cx)
3906 })
3907 .await
3908 .unwrap();
3909 buffer_4.update(cx, |buffer, cx| {
3910 let text = "two::TWO";
3911 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3912 });
3913
3914 assert_eq!(
3915 search(
3916 &project,
3917 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3918 cx
3919 )
3920 .await
3921 .unwrap(),
3922 HashMap::from_iter([
3923 ("two.rs".to_string(), vec![6..9]),
3924 ("three.rs".to_string(), vec![37..40]),
3925 ("four.rs".to_string(), vec![25..28, 36..39])
3926 ])
3927 );
3928}
3929
3930#[gpui::test]
3931async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3932 init_test(cx);
3933
3934 let search_query = "file";
3935
3936 let fs = FakeFs::new(cx.executor());
3937 fs.insert_tree(
3938 "/dir",
3939 json!({
3940 "one.rs": r#"// Rust file one"#,
3941 "one.ts": r#"// TypeScript file one"#,
3942 "two.rs": r#"// Rust file two"#,
3943 "two.ts": r#"// TypeScript file two"#,
3944 }),
3945 )
3946 .await;
3947 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3948
3949 assert!(
3950 search(
3951 &project,
3952 SearchQuery::text(
3953 search_query,
3954 false,
3955 true,
3956 false,
3957 vec![PathMatcher::new("*.odd").unwrap()],
3958 Vec::new()
3959 )
3960 .unwrap(),
3961 cx
3962 )
3963 .await
3964 .unwrap()
3965 .is_empty(),
3966 "If no inclusions match, no files should be returned"
3967 );
3968
3969 assert_eq!(
3970 search(
3971 &project,
3972 SearchQuery::text(
3973 search_query,
3974 false,
3975 true,
3976 false,
3977 vec![PathMatcher::new("*.rs").unwrap()],
3978 Vec::new()
3979 )
3980 .unwrap(),
3981 cx
3982 )
3983 .await
3984 .unwrap(),
3985 HashMap::from_iter([
3986 ("one.rs".to_string(), vec![8..12]),
3987 ("two.rs".to_string(), vec![8..12]),
3988 ]),
3989 "Rust only search should give only Rust files"
3990 );
3991
3992 assert_eq!(
3993 search(
3994 &project,
3995 SearchQuery::text(
3996 search_query,
3997 false,
3998 true,
3999 false,
4000 vec![
4001 PathMatcher::new("*.ts").unwrap(),
4002 PathMatcher::new("*.odd").unwrap(),
4003 ],
4004 Vec::new()
4005 ).unwrap(),
4006 cx
4007 )
4008 .await
4009 .unwrap(),
4010 HashMap::from_iter([
4011 ("one.ts".to_string(), vec![14..18]),
4012 ("two.ts".to_string(), vec![14..18]),
4013 ]),
4014 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4015 );
4016
4017 assert_eq!(
4018 search(
4019 &project,
4020 SearchQuery::text(
4021 search_query,
4022 false,
4023 true,
4024 false,
4025 vec![
4026 PathMatcher::new("*.rs").unwrap(),
4027 PathMatcher::new("*.ts").unwrap(),
4028 PathMatcher::new("*.odd").unwrap(),
4029 ],
4030 Vec::new()
4031 ).unwrap(),
4032 cx
4033 )
4034 .await
4035 .unwrap(),
4036 HashMap::from_iter([
4037 ("one.rs".to_string(), vec![8..12]),
4038 ("one.ts".to_string(), vec![14..18]),
4039 ("two.rs".to_string(), vec![8..12]),
4040 ("two.ts".to_string(), vec![14..18]),
4041 ]),
4042 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4043 );
4044}
4045
4046#[gpui::test]
4047async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4048 init_test(cx);
4049
4050 let search_query = "file";
4051
4052 let fs = FakeFs::new(cx.executor());
4053 fs.insert_tree(
4054 "/dir",
4055 json!({
4056 "one.rs": r#"// Rust file one"#,
4057 "one.ts": r#"// TypeScript file one"#,
4058 "two.rs": r#"// Rust file two"#,
4059 "two.ts": r#"// TypeScript file two"#,
4060 }),
4061 )
4062 .await;
4063 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4064
4065 assert_eq!(
4066 search(
4067 &project,
4068 SearchQuery::text(
4069 search_query,
4070 false,
4071 true,
4072 false,
4073 Vec::new(),
4074 vec![PathMatcher::new("*.odd").unwrap()],
4075 )
4076 .unwrap(),
4077 cx
4078 )
4079 .await
4080 .unwrap(),
4081 HashMap::from_iter([
4082 ("one.rs".to_string(), vec![8..12]),
4083 ("one.ts".to_string(), vec![14..18]),
4084 ("two.rs".to_string(), vec![8..12]),
4085 ("two.ts".to_string(), vec![14..18]),
4086 ]),
4087 "If no exclusions match, all files should be returned"
4088 );
4089
4090 assert_eq!(
4091 search(
4092 &project,
4093 SearchQuery::text(
4094 search_query,
4095 false,
4096 true,
4097 false,
4098 Vec::new(),
4099 vec![PathMatcher::new("*.rs").unwrap()],
4100 )
4101 .unwrap(),
4102 cx
4103 )
4104 .await
4105 .unwrap(),
4106 HashMap::from_iter([
4107 ("one.ts".to_string(), vec![14..18]),
4108 ("two.ts".to_string(), vec![14..18]),
4109 ]),
4110 "Rust exclusion search should give only TypeScript files"
4111 );
4112
4113 assert_eq!(
4114 search(
4115 &project,
4116 SearchQuery::text(
4117 search_query,
4118 false,
4119 true,
4120 false,
4121 Vec::new(),
4122 vec![
4123 PathMatcher::new("*.ts").unwrap(),
4124 PathMatcher::new("*.odd").unwrap(),
4125 ],
4126 ).unwrap(),
4127 cx
4128 )
4129 .await
4130 .unwrap(),
4131 HashMap::from_iter([
4132 ("one.rs".to_string(), vec![8..12]),
4133 ("two.rs".to_string(), vec![8..12]),
4134 ]),
4135 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4136 );
4137
4138 assert!(
4139 search(
4140 &project,
4141 SearchQuery::text(
4142 search_query,
4143 false,
4144 true,
4145 false,
4146 Vec::new(),
4147 vec![
4148 PathMatcher::new("*.rs").unwrap(),
4149 PathMatcher::new("*.ts").unwrap(),
4150 PathMatcher::new("*.odd").unwrap(),
4151 ],
4152 ).unwrap(),
4153 cx
4154 )
4155 .await
4156 .unwrap().is_empty(),
4157 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4158 );
4159}
4160
4161#[gpui::test]
4162async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4163 init_test(cx);
4164
4165 let search_query = "file";
4166
4167 let fs = FakeFs::new(cx.executor());
4168 fs.insert_tree(
4169 "/dir",
4170 json!({
4171 "one.rs": r#"// Rust file one"#,
4172 "one.ts": r#"// TypeScript file one"#,
4173 "two.rs": r#"// Rust file two"#,
4174 "two.ts": r#"// TypeScript file two"#,
4175 }),
4176 )
4177 .await;
4178 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4179
4180 assert!(
4181 search(
4182 &project,
4183 SearchQuery::text(
4184 search_query,
4185 false,
4186 true,
4187 false,
4188 vec![PathMatcher::new("*.odd").unwrap()],
4189 vec![PathMatcher::new("*.odd").unwrap()],
4190 )
4191 .unwrap(),
4192 cx
4193 )
4194 .await
4195 .unwrap()
4196 .is_empty(),
4197 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4198 );
4199
4200 assert!(
4201 search(
4202 &project,
4203 SearchQuery::text(
4204 search_query,
4205 false,
4206 true,
4207 false,
4208 vec![PathMatcher::new("*.ts").unwrap()],
4209 vec![PathMatcher::new("*.ts").unwrap()],
4210 ).unwrap(),
4211 cx
4212 )
4213 .await
4214 .unwrap()
4215 .is_empty(),
4216 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4217 );
4218
4219 assert!(
4220 search(
4221 &project,
4222 SearchQuery::text(
4223 search_query,
4224 false,
4225 true,
4226 false,
4227 vec![
4228 PathMatcher::new("*.ts").unwrap(),
4229 PathMatcher::new("*.odd").unwrap()
4230 ],
4231 vec![
4232 PathMatcher::new("*.ts").unwrap(),
4233 PathMatcher::new("*.odd").unwrap()
4234 ],
4235 )
4236 .unwrap(),
4237 cx
4238 )
4239 .await
4240 .unwrap()
4241 .is_empty(),
4242 "Non-matching inclusions and exclusions should not change that."
4243 );
4244
4245 assert_eq!(
4246 search(
4247 &project,
4248 SearchQuery::text(
4249 search_query,
4250 false,
4251 true,
4252 false,
4253 vec![
4254 PathMatcher::new("*.ts").unwrap(),
4255 PathMatcher::new("*.odd").unwrap()
4256 ],
4257 vec![
4258 PathMatcher::new("*.rs").unwrap(),
4259 PathMatcher::new("*.odd").unwrap()
4260 ],
4261 )
4262 .unwrap(),
4263 cx
4264 )
4265 .await
4266 .unwrap(),
4267 HashMap::from_iter([
4268 ("one.ts".to_string(), vec![14..18]),
4269 ("two.ts".to_string(), vec![14..18]),
4270 ]),
4271 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4272 );
4273}
4274
4275#[gpui::test]
4276async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4277 init_test(cx);
4278
4279 let fs = FakeFs::new(cx.background_executor.clone());
4280 fs.insert_tree(
4281 "/dir",
4282 json!({
4283 ".git": {},
4284 ".gitignore": "**/target\n/node_modules\n",
4285 "target": {
4286 "index.txt": "index_key:index_value"
4287 },
4288 "node_modules": {
4289 "eslint": {
4290 "index.ts": "const eslint_key = 'eslint value'",
4291 "package.json": r#"{ "some_key": "some value" }"#,
4292 },
4293 "prettier": {
4294 "index.ts": "const prettier_key = 'prettier value'",
4295 "package.json": r#"{ "other_key": "other value" }"#,
4296 },
4297 },
4298 "package.json": r#"{ "main_key": "main value" }"#,
4299 }),
4300 )
4301 .await;
4302 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4303
4304 let query = "key";
4305 assert_eq!(
4306 search(
4307 &project,
4308 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4309 cx
4310 )
4311 .await
4312 .unwrap(),
4313 HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
4314 "Only one non-ignored file should have the query"
4315 );
4316
4317 assert_eq!(
4318 search(
4319 &project,
4320 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4321 cx
4322 )
4323 .await
4324 .unwrap(),
4325 HashMap::from_iter([
4326 ("package.json".to_string(), vec![8..11]),
4327 ("target/index.txt".to_string(), vec![6..9]),
4328 (
4329 "node_modules/prettier/package.json".to_string(),
4330 vec![9..12]
4331 ),
4332 ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
4333 ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
4334 ("node_modules/eslint/package.json".to_string(), vec![8..11]),
4335 ]),
4336 "Unrestricted search with ignored directories should find every file with the query"
4337 );
4338
4339 assert_eq!(
4340 search(
4341 &project,
4342 SearchQuery::text(
4343 query,
4344 false,
4345 false,
4346 true,
4347 vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
4348 vec![PathMatcher::new("*.ts").unwrap()],
4349 )
4350 .unwrap(),
4351 cx
4352 )
4353 .await
4354 .unwrap(),
4355 HashMap::from_iter([(
4356 "node_modules/prettier/package.json".to_string(),
4357 vec![9..12]
4358 )]),
4359 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4360 );
4361}
4362
#[test]
fn test_glob_literal_prefix() {
    // The literal prefix is the leading portion of the pattern that
    // contains no glob metacharacters; a fully literal path is returned
    // unchanged.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (pattern, expected) in cases {
        assert_eq!(glob_literal_prefix(pattern), expected);
    }
}
4370
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Entry creation is confined to the worktree: names that merely contain
    // dots (like "b..") are fine, but any path escaping the root or using a
    // ".." component is rejected.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name — the dots are part of the name, not a
    // parent-directory component.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was actually created on disk; the rejected paths left no
    // trace in the filesystem.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4439
/// Runs `query` against `project` and collects the emitted matches into a
/// map from worktree-relative file path to the matching offset ranges in
/// that file.
async fn search(
    project: &Model<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut result = HashMap::default();
    while let Some((buffer, range)) = search_rx.next().await {
        // NOTE(review): `or_insert` keeps only the first set of ranges per
        // buffer, so this assumes the search stream reports each buffer at
        // most once — confirm against `Project::search`.
        result.entry(buffer).or_insert(range);
    }
    Ok(result
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, _| {
                // Key results by the buffer's file path and resolve each
                // reported range to concrete byte offsets.
                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}
4464
4465fn init_test(cx: &mut gpui::TestAppContext) {
4466 if std::env::var("RUST_LOG").is_ok() {
4467 env_logger::try_init().ok();
4468 }
4469
4470 cx.update(|cx| {
4471 let settings_store = SettingsStore::test(cx);
4472 cx.set_global(settings_store);
4473 release_channel::init("0.0.0", cx);
4474 language::init(cx);
4475 Project::init_settings(cx);
4476 });
4477}