use crate::{Event, *};
use fs::FakeFs;
use futures::{future, StreamExt};
use gpui::AppContext;
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{os, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
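// The next two tests exercise `allow_parking`: with parking allowed, a test can perform
// real blocking work (filesystem metadata calls, thread sleeps) off the main thread and
// still await the result from the deterministic executor.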
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
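// Open a project whose root is a symlink and verify that a directory reachable both
// directly ("fennel") and through a symlink ("finnochio") resolves to the same inodes.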
48#[cfg(not(windows))]
49#[gpui::test]
50async fn test_symlinks(cx: &mut gpui::TestAppContext) {
51 init_test(cx);
52 cx.executor().allow_parking();
53
54 let dir = temp_tree(json!({
55 "root": {
56 "apple": "",
57 "banana": {
58 "carrot": {
59 "date": "",
60 "endive": "",
61 }
62 },
63 "fennel": {
64 "grape": "",
65 }
66 }
67 }));
68
69 let root_link_path = dir.path().join("root_link");
70 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
71 os::unix::fs::symlink(
72 &dir.path().join("root/fennel"),
73 &dir.path().join("root/finnochio"),
74 )
75 .unwrap();
76
77 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
78
79 project.update(cx, |project, cx| {
80 let tree = project.worktrees().next().unwrap().read(cx);
81 assert_eq!(tree.file_count(), 5);
82 assert_eq!(
83 tree.inode_for_path("fennel/grape"),
84 tree.inode_for_path("finnochio/grape")
85 );
86 });
87}
88
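// Project-local `.zed/settings.json` files should override settings for the files
// beneath them, with the most deeply nested settings file taking precedence.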
89#[gpui::test]
90async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
91 init_test(cx);
92
93 let fs = FakeFs::new(cx.executor());
94 fs.insert_tree(
95 "/the-root",
96 json!({
97 ".zed": {
98 "settings.json": r#"{ "tab_size": 8 }"#
99 },
100 "a": {
101 "a.rs": "fn a() {\n A\n}"
102 },
103 "b": {
104 ".zed": {
105 "settings.json": r#"{ "tab_size": 2 }"#
106 },
107 "b.rs": "fn b() {\n B\n}"
108 }
109 }),
110 )
111 .await;
112
113 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
114 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
115
116 cx.executor().run_until_parked();
117 cx.update(|cx| {
118 let tree = worktree.read(cx);
119
120 let settings_a = language_settings(
121 None,
122 Some(
123 &(File::for_entry(
124 tree.entry_for_path("a/a.rs").unwrap().clone(),
125 worktree.clone(),
126 ) as _),
127 ),
128 cx,
129 );
130 let settings_b = language_settings(
131 None,
132 Some(
133 &(File::for_entry(
134 tree.entry_for_path("b/b.rs").unwrap().clone(),
135 worktree.clone(),
136 ) as _),
137 ),
138 cx,
139 );
140
141 assert_eq!(settings_a.tab_size.get(), 8);
142 assert_eq!(settings_b.tab_size.get(), 2);
143 });
144}
145
146#[gpui::test]
147async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
148 init_test(cx);
149
150 let mut rust_language = Language::new(
151 LanguageConfig {
152 name: "Rust".into(),
153 matcher: LanguageMatcher {
154 path_suffixes: vec!["rs".to_string()],
155 ..Default::default()
156 },
157 ..Default::default()
158 },
159 Some(tree_sitter_rust::language()),
160 );
161 let mut json_language = Language::new(
162 LanguageConfig {
163 name: "JSON".into(),
164 matcher: LanguageMatcher {
165 path_suffixes: vec!["json".to_string()],
166 ..Default::default()
167 },
168 ..Default::default()
169 },
170 None,
171 );
172 let mut fake_rust_servers = rust_language
173 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
174 name: "the-rust-language-server",
175 capabilities: lsp::ServerCapabilities {
176 completion_provider: Some(lsp::CompletionOptions {
177 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
178 ..Default::default()
179 }),
180 ..Default::default()
181 },
182 ..Default::default()
183 }))
184 .await;
185 let mut fake_json_servers = json_language
186 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
187 name: "the-json-language-server",
188 capabilities: lsp::ServerCapabilities {
189 completion_provider: Some(lsp::CompletionOptions {
190 trigger_characters: Some(vec![":".to_string()]),
191 ..Default::default()
192 }),
193 ..Default::default()
194 },
195 ..Default::default()
196 }))
197 .await;
198
199 let fs = FakeFs::new(cx.executor());
200 fs.insert_tree(
201 "/the-root",
202 json!({
203 "test.rs": "const A: i32 = 1;",
204 "test2.rs": "",
205 "Cargo.toml": "a = 1",
206 "package.json": "{\"a\": 1}",
207 }),
208 )
209 .await;
210
211 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
212
213 // Open a buffer without an associated language server.
214 let toml_buffer = project
215 .update(cx, |project, cx| {
216 project.open_local_buffer("/the-root/Cargo.toml", cx)
217 })
218 .await
219 .unwrap();
220
221 // Open a buffer with an associated language server before the language for it has been loaded.
222 let rust_buffer = project
223 .update(cx, |project, cx| {
224 project.open_local_buffer("/the-root/test.rs", cx)
225 })
226 .await
227 .unwrap();
228 rust_buffer.update(cx, |buffer, _| {
229 assert_eq!(buffer.language().map(|l| l.name()), None);
230 });
231
232 // Now we add the languages to the project, and ensure they get assigned to all
233 // the relevant open buffers.
234 project.update(cx, |project, _| {
235 project.languages.add(Arc::new(json_language));
236 project.languages.add(Arc::new(rust_language));
237 });
238 cx.executor().run_until_parked();
239 rust_buffer.update(cx, |buffer, _| {
240 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
241 });
242
243 // A server is started up, and it is notified about Rust files.
244 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
245 assert_eq!(
246 fake_rust_server
247 .receive_notification::<lsp::notification::DidOpenTextDocument>()
248 .await
249 .text_document,
250 lsp::TextDocumentItem {
251 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
252 version: 0,
253 text: "const A: i32 = 1;".to_string(),
254 language_id: Default::default()
255 }
256 );
257
258 // The buffer is configured based on the language server's capabilities.
259 rust_buffer.update(cx, |buffer, _| {
260 assert_eq!(
261 buffer.completion_triggers(),
262 &[".".to_string(), "::".to_string()]
263 );
264 });
265 toml_buffer.update(cx, |buffer, _| {
266 assert!(buffer.completion_triggers().is_empty());
267 });
268
269 // Edit a buffer. The changes are reported to the language server.
270 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
271 assert_eq!(
272 fake_rust_server
273 .receive_notification::<lsp::notification::DidChangeTextDocument>()
274 .await
275 .text_document,
276 lsp::VersionedTextDocumentIdentifier::new(
277 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
278 1
279 )
280 );
281
282 // Open a third buffer with a different associated language server.
283 let json_buffer = project
284 .update(cx, |project, cx| {
285 project.open_local_buffer("/the-root/package.json", cx)
286 })
287 .await
288 .unwrap();
289
// A JSON language server is started and is notified only about the JSON buffer.
291 let mut fake_json_server = fake_json_servers.next().await.unwrap();
292 assert_eq!(
293 fake_json_server
294 .receive_notification::<lsp::notification::DidOpenTextDocument>()
295 .await
296 .text_document,
297 lsp::TextDocumentItem {
298 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
299 version: 0,
300 text: "{\"a\": 1}".to_string(),
301 language_id: Default::default()
302 }
303 );
304
305 // This buffer is configured based on the second language server's
306 // capabilities.
307 json_buffer.update(cx, |buffer, _| {
308 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
309 });
310
311 // When opening another buffer whose language server is already running,
312 // it is also configured based on the existing language server's capabilities.
313 let rust_buffer2 = project
314 .update(cx, |project, cx| {
315 project.open_local_buffer("/the-root/test2.rs", cx)
316 })
317 .await
318 .unwrap();
319 rust_buffer2.update(cx, |buffer, _| {
320 assert_eq!(
321 buffer.completion_triggers(),
322 &[".".to_string(), "::".to_string()]
323 );
324 });
325
326 // Changes are reported only to servers matching the buffer's language.
327 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
328 rust_buffer2.update(cx, |buffer, cx| {
329 buffer.edit([(0..0, "let x = 1;")], None, cx)
330 });
331 assert_eq!(
332 fake_rust_server
333 .receive_notification::<lsp::notification::DidChangeTextDocument>()
334 .await
335 .text_document,
336 lsp::VersionedTextDocumentIdentifier::new(
337 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
338 1
339 )
340 );
341
342 // Save notifications are reported to all servers.
343 project
344 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
345 .await
346 .unwrap();
347 assert_eq!(
348 fake_rust_server
349 .receive_notification::<lsp::notification::DidSaveTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
353 );
354 assert_eq!(
355 fake_json_server
356 .receive_notification::<lsp::notification::DidSaveTextDocument>()
357 .await
358 .text_document,
359 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
360 );
361
362 // Renames are reported only to servers matching the buffer's language.
363 fs.rename(
364 Path::new("/the-root/test2.rs"),
365 Path::new("/the-root/test3.rs"),
366 Default::default(),
367 )
368 .await
369 .unwrap();
370 assert_eq!(
371 fake_rust_server
372 .receive_notification::<lsp::notification::DidCloseTextDocument>()
373 .await
374 .text_document,
375 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
376 );
377 assert_eq!(
378 fake_rust_server
379 .receive_notification::<lsp::notification::DidOpenTextDocument>()
380 .await
381 .text_document,
382 lsp::TextDocumentItem {
383 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
384 version: 0,
385 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
386 language_id: Default::default()
387 },
388 );
389
390 rust_buffer2.update(cx, |buffer, cx| {
391 buffer.update_diagnostics(
392 LanguageServerId(0),
393 DiagnosticSet::from_sorted_entries(
394 vec![DiagnosticEntry {
395 diagnostic: Default::default(),
396 range: Anchor::MIN..Anchor::MAX,
397 }],
398 &buffer.snapshot(),
399 ),
400 cx,
401 );
402 assert_eq!(
403 buffer
404 .snapshot()
405 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
406 .count(),
407 1
408 );
409 });
410
411 // When the rename changes the extension of the file, the buffer gets closed on the old
412 // language server and gets opened on the new one.
413 fs.rename(
414 Path::new("/the-root/test3.rs"),
415 Path::new("/the-root/test3.json"),
416 Default::default(),
417 )
418 .await
419 .unwrap();
420 assert_eq!(
421 fake_rust_server
422 .receive_notification::<lsp::notification::DidCloseTextDocument>()
423 .await
424 .text_document,
425 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
426 );
427 assert_eq!(
428 fake_json_server
429 .receive_notification::<lsp::notification::DidOpenTextDocument>()
430 .await
431 .text_document,
432 lsp::TextDocumentItem {
433 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
434 version: 0,
435 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
436 language_id: Default::default()
437 },
438 );
439
// The diagnostics are cleared, since the buffer's language has changed.
441 rust_buffer2.update(cx, |buffer, _| {
442 assert_eq!(
443 buffer
444 .snapshot()
445 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
446 .count(),
447 0
448 );
449 });
450
451 // The renamed file's version resets after changing language server.
452 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
453 assert_eq!(
454 fake_json_server
455 .receive_notification::<lsp::notification::DidChangeTextDocument>()
456 .await
457 .text_document,
458 lsp::VersionedTextDocumentIdentifier::new(
459 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
460 1
461 )
462 );
463
// Restart the language servers for the Rust and JSON buffers.
465 project.update(cx, |project, cx| {
466 project.restart_language_servers_for_buffers(
467 vec![rust_buffer.clone(), json_buffer.clone()],
468 cx,
469 );
470 });
471
472 let mut rust_shutdown_requests = fake_rust_server
473 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
474 let mut json_shutdown_requests = fake_json_server
475 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
476 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
477
478 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
479 let mut fake_json_server = fake_json_servers.next().await.unwrap();
480
// Ensure the Rust document is reopened in the new Rust language server.
482 assert_eq!(
483 fake_rust_server
484 .receive_notification::<lsp::notification::DidOpenTextDocument>()
485 .await
486 .text_document,
487 lsp::TextDocumentItem {
488 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
489 version: 0,
490 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
491 language_id: Default::default()
492 }
493 );
494
// Ensure the JSON documents are reopened in the new JSON language server.
496 assert_set_eq!(
497 [
498 fake_json_server
499 .receive_notification::<lsp::notification::DidOpenTextDocument>()
500 .await
501 .text_document,
502 fake_json_server
503 .receive_notification::<lsp::notification::DidOpenTextDocument>()
504 .await
505 .text_document,
506 ],
507 [
508 lsp::TextDocumentItem {
509 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
510 version: 0,
511 text: json_buffer.update(cx, |buffer, _| buffer.text()),
512 language_id: Default::default()
513 },
514 lsp::TextDocumentItem {
515 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
516 version: 0,
517 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
518 language_id: Default::default()
519 }
520 ]
521 );
522
523 // Close notifications are reported only to servers matching the buffer's language.
524 cx.update(|_| drop(json_buffer));
525 let close_message = lsp::DidCloseTextDocumentParams {
526 text_document: lsp::TextDocumentIdentifier::new(
527 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
528 ),
529 };
530 assert_eq!(
531 fake_json_server
532 .receive_notification::<lsp::notification::DidCloseTextDocument>()
533 .await,
534 close_message,
535 );
536}
537
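// When a language server registers for `workspace/didChangeWatchedFiles`, the project
// should load any ignored directories covered by the requested globs and forward only
// the matching file system events to that server.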
538#[gpui::test]
539async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
540 init_test(cx);
541
542 let mut language = Language::new(
543 LanguageConfig {
544 name: "Rust".into(),
545 matcher: LanguageMatcher {
546 path_suffixes: vec!["rs".to_string()],
547 ..Default::default()
548 },
549 ..Default::default()
550 },
551 Some(tree_sitter_rust::language()),
552 );
553 let mut fake_servers = language
554 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
555 name: "the-language-server",
556 ..Default::default()
557 }))
558 .await;
559
560 let fs = FakeFs::new(cx.executor());
561 fs.insert_tree(
562 "/the-root",
563 json!({
564 ".gitignore": "target\n",
565 "src": {
566 "a.rs": "",
567 "b.rs": "",
568 },
569 "target": {
570 "x": {
571 "out": {
572 "x.rs": ""
573 }
574 },
575 "y": {
576 "out": {
577 "y.rs": "",
578 }
579 },
580 "z": {
581 "out": {
582 "z.rs": ""
583 }
584 }
585 }
586 }),
587 )
588 .await;
589
590 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
591 project.update(cx, |project, _| {
592 project.languages.add(Arc::new(language));
593 });
594 cx.executor().run_until_parked();
595
596 // Start the language server by opening a buffer with a compatible file extension.
597 let _buffer = project
598 .update(cx, |project, cx| {
599 project.open_local_buffer("/the-root/src/a.rs", cx)
600 })
601 .await
602 .unwrap();
603
604 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
605 project.update(cx, |project, cx| {
606 let worktree = project.worktrees().next().unwrap();
607 assert_eq!(
608 worktree
609 .read(cx)
610 .snapshot()
611 .entries(true)
612 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
613 .collect::<Vec<_>>(),
614 &[
615 (Path::new(""), false),
616 (Path::new(".gitignore"), false),
617 (Path::new("src"), false),
618 (Path::new("src/a.rs"), false),
619 (Path::new("src/b.rs"), false),
620 (Path::new("target"), true),
621 ]
622 );
623 });
624
625 let prev_read_dir_count = fs.read_dir_call_count();
626
627 // Keep track of the FS events reported to the language server.
628 let fake_server = fake_servers.next().await.unwrap();
629 let file_changes = Arc::new(Mutex::new(Vec::new()));
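// Simulate the server registering three file watchers, one of which targets a path
// inside the gitignored `target` directory.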
630 fake_server
631 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
632 registrations: vec![lsp::Registration {
633 id: Default::default(),
634 method: "workspace/didChangeWatchedFiles".to_string(),
635 register_options: serde_json::to_value(
636 lsp::DidChangeWatchedFilesRegistrationOptions {
637 watchers: vec![
638 lsp::FileSystemWatcher {
639 glob_pattern: lsp::GlobPattern::String(
640 "/the-root/Cargo.toml".to_string(),
641 ),
642 kind: None,
643 },
644 lsp::FileSystemWatcher {
645 glob_pattern: lsp::GlobPattern::String(
646 "/the-root/src/*.{rs,c}".to_string(),
647 ),
648 kind: None,
649 },
650 lsp::FileSystemWatcher {
651 glob_pattern: lsp::GlobPattern::String(
652 "/the-root/target/y/**/*.rs".to_string(),
653 ),
654 kind: None,
655 },
656 ],
657 },
658 )
659 .ok(),
660 }],
661 })
662 .await
663 .unwrap();
664 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
665 let file_changes = file_changes.clone();
666 move |params, _| {
667 let mut file_changes = file_changes.lock();
668 file_changes.extend(params.changes);
669 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
670 }
671 });
672
673 cx.executor().run_until_parked();
674 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
675 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
676
677 // Now the language server has asked us to watch an ignored directory path,
678 // so we recursively load it.
679 project.update(cx, |project, cx| {
680 let worktree = project.worktrees().next().unwrap();
681 assert_eq!(
682 worktree
683 .read(cx)
684 .snapshot()
685 .entries(true)
686 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
687 .collect::<Vec<_>>(),
688 &[
689 (Path::new(""), false),
690 (Path::new(".gitignore"), false),
691 (Path::new("src"), false),
692 (Path::new("src/a.rs"), false),
693 (Path::new("src/b.rs"), false),
694 (Path::new("target"), true),
695 (Path::new("target/x"), true),
696 (Path::new("target/y"), true),
697 (Path::new("target/y/out"), true),
698 (Path::new("target/y/out/y.rs"), true),
699 (Path::new("target/z"), true),
700 ]
701 );
702 });
703
// Perform some file system mutations, three of which match the watched patterns
// and two of which do not.
706 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
707 .await
708 .unwrap();
709 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
710 .await
711 .unwrap();
712 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
713 .await
714 .unwrap();
715 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
716 .await
717 .unwrap();
718 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
719 .await
720 .unwrap();
721
722 // The language server receives events for the FS mutations that match its watch patterns.
723 cx.executor().run_until_parked();
724 assert_eq!(
725 &*file_changes.lock(),
726 &[
727 lsp::FileEvent {
728 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
729 typ: lsp::FileChangeType::DELETED,
730 },
731 lsp::FileEvent {
732 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
733 typ: lsp::FileChangeType::CREATED,
734 },
735 lsp::FileEvent {
736 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
737 typ: lsp::FileChangeType::CREATED,
738 },
739 ]
740 );
741}
742
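// Diagnostics published for files opened as single-file worktrees should be routed to
// the corresponding buffers.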
743#[gpui::test]
744async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
745 init_test(cx);
746
747 let fs = FakeFs::new(cx.executor());
748 fs.insert_tree(
749 "/dir",
750 json!({
751 "a.rs": "let a = 1;",
752 "b.rs": "let b = 2;"
753 }),
754 )
755 .await;
756
757 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
758
759 let buffer_a = project
760 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
761 .await
762 .unwrap();
763 let buffer_b = project
764 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
765 .await
766 .unwrap();
767
768 project.update(cx, |project, cx| {
769 project
770 .update_diagnostics(
771 LanguageServerId(0),
772 lsp::PublishDiagnosticsParams {
773 uri: Url::from_file_path("/dir/a.rs").unwrap(),
774 version: None,
775 diagnostics: vec![lsp::Diagnostic {
776 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
777 severity: Some(lsp::DiagnosticSeverity::ERROR),
778 message: "error 1".to_string(),
779 ..Default::default()
780 }],
781 },
782 &[],
783 cx,
784 )
785 .unwrap();
786 project
787 .update_diagnostics(
788 LanguageServerId(0),
789 lsp::PublishDiagnosticsParams {
790 uri: Url::from_file_path("/dir/b.rs").unwrap(),
791 version: None,
792 diagnostics: vec![lsp::Diagnostic {
793 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
794 severity: Some(lsp::DiagnosticSeverity::WARNING),
795 message: "error 2".to_string(),
796 ..Default::default()
797 }],
798 },
799 &[],
800 cx,
801 )
802 .unwrap();
803 });
804
805 buffer_a.update(cx, |buffer, _| {
806 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
807 assert_eq!(
808 chunks
809 .iter()
810 .map(|(s, d)| (s.as_str(), *d))
811 .collect::<Vec<_>>(),
812 &[
813 ("let ", None),
814 ("a", Some(DiagnosticSeverity::ERROR)),
815 (" = 1;", None),
816 ]
817 );
818 });
819 buffer_b.update(cx, |buffer, _| {
820 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
821 assert_eq!(
822 chunks
823 .iter()
824 .map(|(s, d)| (s.as_str(), *d))
825 .collect::<Vec<_>>(),
826 &[
827 ("let ", None),
828 ("b", Some(DiagnosticSeverity::WARNING)),
829 (" = 2;", None),
830 ]
831 );
832 });
833}
834
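// Diagnostics in gitignored files and in invisible worktrees should appear in the
// buffers themselves, but should be omitted from the project-wide summaries unless
// explicitly requested.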
835#[gpui::test]
836async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
837 init_test(cx);
838
839 let fs = FakeFs::new(cx.executor());
840 fs.insert_tree(
841 "/root",
842 json!({
843 "dir": {
844 ".git": {
845 "HEAD": "ref: refs/heads/main",
846 },
847 ".gitignore": "b.rs",
848 "a.rs": "let a = 1;",
849 "b.rs": "let b = 2;",
850 },
851 "other.rs": "let b = c;"
852 }),
853 )
854 .await;
855
856 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
857 let (worktree, _) = project
858 .update(cx, |project, cx| {
859 project.find_or_create_local_worktree("/root/dir", true, cx)
860 })
861 .await
862 .unwrap();
863 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
864
865 let (worktree, _) = project
866 .update(cx, |project, cx| {
867 project.find_or_create_local_worktree("/root/other.rs", false, cx)
868 })
869 .await
870 .unwrap();
871 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
872
873 let server_id = LanguageServerId(0);
874 project.update(cx, |project, cx| {
875 project
876 .update_diagnostics(
877 server_id,
878 lsp::PublishDiagnosticsParams {
879 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
880 version: None,
881 diagnostics: vec![lsp::Diagnostic {
882 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
883 severity: Some(lsp::DiagnosticSeverity::ERROR),
884 message: "unused variable 'b'".to_string(),
885 ..Default::default()
886 }],
887 },
888 &[],
889 cx,
890 )
891 .unwrap();
892 project
893 .update_diagnostics(
894 server_id,
895 lsp::PublishDiagnosticsParams {
896 uri: Url::from_file_path("/root/other.rs").unwrap(),
897 version: None,
898 diagnostics: vec![lsp::Diagnostic {
899 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
900 severity: Some(lsp::DiagnosticSeverity::ERROR),
901 message: "unknown variable 'c'".to_string(),
902 ..Default::default()
903 }],
904 },
905 &[],
906 cx,
907 )
908 .unwrap();
909 });
910
911 let main_ignored_buffer = project
912 .update(cx, |project, cx| {
913 project.open_buffer((main_worktree_id, "b.rs"), cx)
914 })
915 .await
916 .unwrap();
917 main_ignored_buffer.update(cx, |buffer, _| {
918 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
919 assert_eq!(
920 chunks
921 .iter()
922 .map(|(s, d)| (s.as_str(), *d))
923 .collect::<Vec<_>>(),
924 &[
925 ("let ", None),
926 ("b", Some(DiagnosticSeverity::ERROR)),
927 (" = 2;", None),
928 ],
929 "Gigitnored buffers should still get in-buffer diagnostics",
930 );
931 });
932 let other_buffer = project
933 .update(cx, |project, cx| {
934 project.open_buffer((other_worktree_id, ""), cx)
935 })
936 .await
937 .unwrap();
938 other_buffer.update(cx, |buffer, _| {
939 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
940 assert_eq!(
941 chunks
942 .iter()
943 .map(|(s, d)| (s.as_str(), *d))
944 .collect::<Vec<_>>(),
945 &[
946 ("let b = ", None),
947 ("c", Some(DiagnosticSeverity::ERROR)),
948 (";", None),
949 ],
950 "Buffers from hidden projects should still get in-buffer diagnostics"
951 );
952 });
953
954 project.update(cx, |project, cx| {
955 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
956 assert_eq!(
957 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
958 vec![(
959 ProjectPath {
960 worktree_id: main_worktree_id,
961 path: Arc::from(Path::new("b.rs")),
962 },
963 server_id,
964 DiagnosticSummary {
965 error_count: 1,
966 warning_count: 0,
967 }
968 )]
969 );
970 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
971 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
972 });
973}
974
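// Progress notifications carrying the adapter's disk-based diagnostics token should be
// surfaced as `DiskBasedDiagnosticsStarted`/`Finished` project events.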
975#[gpui::test]
976async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
977 init_test(cx);
978
979 let progress_token = "the-progress-token";
980 let mut language = Language::new(
981 LanguageConfig {
982 name: "Rust".into(),
983 matcher: LanguageMatcher {
984 path_suffixes: vec!["rs".to_string()],
985 ..Default::default()
986 },
987 ..Default::default()
988 },
989 Some(tree_sitter_rust::language()),
990 );
991 let mut fake_servers = language
992 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
993 disk_based_diagnostics_progress_token: Some(progress_token.into()),
994 disk_based_diagnostics_sources: vec!["disk".into()],
995 ..Default::default()
996 }))
997 .await;
998
999 let fs = FakeFs::new(cx.executor());
1000 fs.insert_tree(
1001 "/dir",
1002 json!({
1003 "a.rs": "fn a() { A }",
1004 "b.rs": "const y: i32 = 1",
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1010 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1011 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1012
// Cause the worktree to start the fake language server.
1014 let _buffer = project
1015 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1016 .await
1017 .unwrap();
1018
1019 let mut events = cx.events(&project);
1020
1021 let fake_server = fake_servers.next().await.unwrap();
1022 assert_eq!(
1023 events.next().await.unwrap(),
1024 Event::LanguageServerAdded(LanguageServerId(0)),
1025 );
1026
1027 fake_server
1028 .start_progress(format!("{}/0", progress_token))
1029 .await;
1030 assert_eq!(
1031 events.next().await.unwrap(),
1032 Event::DiskBasedDiagnosticsStarted {
1033 language_server_id: LanguageServerId(0),
1034 }
1035 );
1036
1037 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1038 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1039 version: None,
1040 diagnostics: vec![lsp::Diagnostic {
1041 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1042 severity: Some(lsp::DiagnosticSeverity::ERROR),
1043 message: "undefined variable 'A'".to_string(),
1044 ..Default::default()
1045 }],
1046 });
1047 assert_eq!(
1048 events.next().await.unwrap(),
1049 Event::DiagnosticsUpdated {
1050 language_server_id: LanguageServerId(0),
1051 path: (worktree_id, Path::new("a.rs")).into()
1052 }
1053 );
1054
1055 fake_server.end_progress(format!("{}/0", progress_token));
1056 assert_eq!(
1057 events.next().await.unwrap(),
1058 Event::DiskBasedDiagnosticsFinished {
1059 language_server_id: LanguageServerId(0)
1060 }
1061 );
1062
1063 let buffer = project
1064 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1065 .await
1066 .unwrap();
1067
1068 buffer.update(cx, |buffer, _| {
1069 let snapshot = buffer.snapshot();
1070 let diagnostics = snapshot
1071 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1072 .collect::<Vec<_>>();
1073 assert_eq!(
1074 diagnostics,
1075 &[DiagnosticEntry {
1076 range: Point::new(0, 9)..Point::new(0, 10),
1077 diagnostic: Diagnostic {
1078 severity: lsp::DiagnosticSeverity::ERROR,
1079 message: "undefined variable 'A'".to_string(),
1080 group_id: 0,
1081 is_primary: true,
1082 ..Default::default()
1083 }
1084 }]
1085 )
1086 });
1087
1088 // Ensure publishing empty diagnostics twice only results in one update event.
1089 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1090 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1091 version: None,
1092 diagnostics: Default::default(),
1093 });
1094 assert_eq!(
1095 events.next().await.unwrap(),
1096 Event::DiagnosticsUpdated {
1097 language_server_id: LanguageServerId(0),
1098 path: (worktree_id, Path::new("a.rs")).into()
1099 }
1100 );
1101
1102 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1103 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1104 version: None,
1105 diagnostics: Default::default(),
1106 });
1107 cx.executor().run_until_parked();
1108 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1109}
1110
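// Restarting a language server while it is still reporting disk-based diagnostics
// should not leave the project stuck in the "diagnostics running" state.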
1111#[gpui::test]
1112async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1113 init_test(cx);
1114
1115 let progress_token = "the-progress-token";
1116 let mut language = Language::new(
1117 LanguageConfig {
1118 matcher: LanguageMatcher {
1119 path_suffixes: vec!["rs".to_string()],
1120 ..Default::default()
1121 },
1122 ..Default::default()
1123 },
1124 None,
1125 );
1126 let mut fake_servers = language
1127 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1128 disk_based_diagnostics_sources: vec!["disk".into()],
1129 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1130 ..Default::default()
1131 }))
1132 .await;
1133
1134 let fs = FakeFs::new(cx.executor());
1135 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1136
1137 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1138 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1139
1140 let buffer = project
1141 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1142 .await
1143 .unwrap();
1144
1145 // Simulate diagnostics starting to update.
1146 let fake_server = fake_servers.next().await.unwrap();
1147 fake_server.start_progress(progress_token).await;
1148
1149 // Restart the server before the diagnostics finish updating.
1150 project.update(cx, |project, cx| {
1151 project.restart_language_servers_for_buffers([buffer], cx);
1152 });
1153 let mut events = cx.events(&project);
1154
1155 // Simulate the newly started server sending more diagnostics.
1156 let fake_server = fake_servers.next().await.unwrap();
1157 assert_eq!(
1158 events.next().await.unwrap(),
1159 Event::LanguageServerAdded(LanguageServerId(1))
1160 );
1161 fake_server.start_progress(progress_token).await;
1162 assert_eq!(
1163 events.next().await.unwrap(),
1164 Event::DiskBasedDiagnosticsStarted {
1165 language_server_id: LanguageServerId(1)
1166 }
1167 );
1168 project.update(cx, |project, _| {
1169 assert_eq!(
1170 project
1171 .language_servers_running_disk_based_diagnostics()
1172 .collect::<Vec<_>>(),
1173 [LanguageServerId(1)]
1174 );
1175 });
1176
1177 // All diagnostics are considered done, despite the old server's diagnostic
1178 // task never completing.
1179 fake_server.end_progress(progress_token);
1180 assert_eq!(
1181 events.next().await.unwrap(),
1182 Event::DiskBasedDiagnosticsFinished {
1183 language_server_id: LanguageServerId(1)
1184 }
1185 );
1186 project.update(cx, |project, _| {
1187 assert_eq!(
1188 project
1189 .language_servers_running_disk_based_diagnostics()
1190 .collect::<Vec<_>>(),
1191 [LanguageServerId(0); 0]
1192 );
1193 });
1194}
1195
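// Restarting a language server should clear the diagnostics it had published.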
1196#[gpui::test]
1197async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1198 init_test(cx);
1199
1200 let mut language = Language::new(
1201 LanguageConfig {
1202 matcher: LanguageMatcher {
1203 path_suffixes: vec!["rs".to_string()],
1204 ..Default::default()
1205 },
1206 ..Default::default()
1207 },
1208 None,
1209 );
1210 let mut fake_servers = language
1211 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1212 ..Default::default()
1213 }))
1214 .await;
1215
1216 let fs = FakeFs::new(cx.executor());
1217 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1218
1219 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1220 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1221
1222 let buffer = project
1223 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1224 .await
1225 .unwrap();
1226
1227 // Publish diagnostics
1228 let fake_server = fake_servers.next().await.unwrap();
1229 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1230 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1231 version: None,
1232 diagnostics: vec![lsp::Diagnostic {
1233 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1234 severity: Some(lsp::DiagnosticSeverity::ERROR),
1235 message: "the message".to_string(),
1236 ..Default::default()
1237 }],
1238 });
1239
1240 cx.executor().run_until_parked();
1241 buffer.update(cx, |buffer, _| {
1242 assert_eq!(
1243 buffer
1244 .snapshot()
1245 .diagnostics_in_range::<_, usize>(0..1, false)
1246 .map(|entry| entry.diagnostic.message.clone())
1247 .collect::<Vec<_>>(),
1248 ["the message".to_string()]
1249 );
1250 });
1251 project.update(cx, |project, cx| {
1252 assert_eq!(
1253 project.diagnostic_summary(false, cx),
1254 DiagnosticSummary {
1255 error_count: 1,
1256 warning_count: 0,
1257 }
1258 );
1259 });
1260
1261 project.update(cx, |project, cx| {
1262 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1263 });
1264
1265 // The diagnostics are cleared.
1266 cx.executor().run_until_parked();
1267 buffer.update(cx, |buffer, _| {
1268 assert_eq!(
1269 buffer
1270 .snapshot()
1271 .diagnostics_in_range::<_, usize>(0..1, false)
1272 .map(|entry| entry.diagnostic.message.clone())
1273 .collect::<Vec<_>>(),
1274 Vec::<String>::new(),
1275 );
1276 });
1277 project.update(cx, |project, cx| {
1278 assert_eq!(
1279 project.diagnostic_summary(false, cx),
1280 DiagnosticSummary {
1281 error_count: 0,
1282 warning_count: 0,
1283 }
1284 );
1285 });
1286}
1287
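// A server that reports diagnostics for an unknown buffer version should not corrupt
// the buffer's state: after restarting, the document is reopened at version 0.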
1288#[gpui::test]
1289async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1290 init_test(cx);
1291
1292 let mut language = Language::new(
1293 LanguageConfig {
1294 matcher: LanguageMatcher {
1295 path_suffixes: vec!["rs".to_string()],
1296 ..Default::default()
1297 },
1298 ..Default::default()
1299 },
1300 None,
1301 );
1302 let mut fake_servers = language
1303 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1304 name: "the-lsp",
1305 ..Default::default()
1306 }))
1307 .await;
1308
1309 let fs = FakeFs::new(cx.executor());
1310 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1311
1312 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1313 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1314
1315 let buffer = project
1316 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1317 .await
1318 .unwrap();
1319
1320 // Before restarting the server, report diagnostics with an unknown buffer version.
1321 let fake_server = fake_servers.next().await.unwrap();
1322 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1323 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1324 version: Some(10000),
1325 diagnostics: Vec::new(),
1326 });
1327 cx.executor().run_until_parked();
1328
1329 project.update(cx, |project, cx| {
1330 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1331 });
1332 let mut fake_server = fake_servers.next().await.unwrap();
1333 let notification = fake_server
1334 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1335 .await
1336 .text_document;
1337 assert_eq!(notification.version, 0);
1338}
1339
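// Toggling `enable_language_server` in the language settings should stop and restart
// only the servers for the affected languages.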
1340#[gpui::test]
1341async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1342 init_test(cx);
1343
1344 let mut rust = Language::new(
1345 LanguageConfig {
1346 name: Arc::from("Rust"),
1347 matcher: LanguageMatcher {
1348 path_suffixes: vec!["rs".to_string()],
1349 ..Default::default()
1350 },
1351 ..Default::default()
1352 },
1353 None,
1354 );
1355 let mut fake_rust_servers = rust
1356 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1357 name: "rust-lsp",
1358 ..Default::default()
1359 }))
1360 .await;
1361 let mut js = Language::new(
1362 LanguageConfig {
1363 name: Arc::from("JavaScript"),
1364 matcher: LanguageMatcher {
1365 path_suffixes: vec!["js".to_string()],
1366 ..Default::default()
1367 },
1368 ..Default::default()
1369 },
1370 None,
1371 );
1372 let mut fake_js_servers = js
1373 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1374 name: "js-lsp",
1375 ..Default::default()
1376 }))
1377 .await;
1378
1379 let fs = FakeFs::new(cx.executor());
1380 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1381 .await;
1382
1383 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1384 project.update(cx, |project, _| {
1385 project.languages.add(Arc::new(rust));
1386 project.languages.add(Arc::new(js));
1387 });
1388
1389 let _rs_buffer = project
1390 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1391 .await
1392 .unwrap();
1393 let _js_buffer = project
1394 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1395 .await
1396 .unwrap();
1397
1398 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1399 assert_eq!(
1400 fake_rust_server_1
1401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1402 .await
1403 .text_document
1404 .uri
1405 .as_str(),
1406 "file:///dir/a.rs"
1407 );
1408
1409 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1410 assert_eq!(
1411 fake_js_server
1412 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1413 .await
1414 .text_document
1415 .uri
1416 .as_str(),
1417 "file:///dir/b.js"
1418 );
1419
// Disable the Rust language server, ensuring that only that server gets stopped.
1421 cx.update(|cx| {
1422 cx.update_global(|settings: &mut SettingsStore, cx| {
1423 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1424 settings.languages.insert(
1425 Arc::from("Rust"),
1426 LanguageSettingsContent {
1427 enable_language_server: Some(false),
1428 ..Default::default()
1429 },
1430 );
1431 });
1432 })
1433 });
1434 fake_rust_server_1
1435 .receive_notification::<lsp::notification::Exit>()
1436 .await;
1437
1438 // Enable Rust and disable JavaScript language servers, ensuring that the
1439 // former gets started again and that the latter stops.
1440 cx.update(|cx| {
1441 cx.update_global(|settings: &mut SettingsStore, cx| {
1442 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1443 settings.languages.insert(
1444 Arc::from("Rust"),
1445 LanguageSettingsContent {
1446 enable_language_server: Some(true),
1447 ..Default::default()
1448 },
1449 );
1450 settings.languages.insert(
1451 Arc::from("JavaScript"),
1452 LanguageSettingsContent {
1453 enable_language_server: Some(false),
1454 ..Default::default()
1455 },
1456 );
1457 });
1458 })
1459 });
1460 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1461 assert_eq!(
1462 fake_rust_server_2
1463 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1464 .await
1465 .text_document
1466 .uri
1467 .as_str(),
1468 "file:///dir/a.rs"
1469 );
1470 fake_js_server
1471 .receive_notification::<lsp::notification::Exit>()
1472 .await;
1473}
1474
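// Diagnostics that arrive for an older version of the buffer should be transformed
// through the edits made since that version, including overlapping and out-of-order
// ranges.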
1475#[gpui::test(iterations = 3)]
1476async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1477 init_test(cx);
1478
1479 let mut language = Language::new(
1480 LanguageConfig {
1481 name: "Rust".into(),
1482 matcher: LanguageMatcher {
1483 path_suffixes: vec!["rs".to_string()],
1484 ..Default::default()
1485 },
1486 ..Default::default()
1487 },
1488 Some(tree_sitter_rust::language()),
1489 );
1490 let mut fake_servers = language
1491 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1492 disk_based_diagnostics_sources: vec!["disk".into()],
1493 ..Default::default()
1494 }))
1495 .await;
1496
1497 let text = "
1498 fn a() { A }
1499 fn b() { BB }
1500 fn c() { CCC }
1501 "
1502 .unindent();
1503
1504 let fs = FakeFs::new(cx.executor());
1505 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1506
1507 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1508 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1509
1510 let buffer = project
1511 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1512 .await
1513 .unwrap();
1514
1515 let mut fake_server = fake_servers.next().await.unwrap();
1516 let open_notification = fake_server
1517 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1518 .await;
1519
1520 // Edit the buffer, moving the content down
1521 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1522 let change_notification_1 = fake_server
1523 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1524 .await;
1525 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1526
1527 // Report some diagnostics for the initial version of the buffer
1528 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1529 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1530 version: Some(open_notification.text_document.version),
1531 diagnostics: vec![
1532 lsp::Diagnostic {
1533 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1534 severity: Some(DiagnosticSeverity::ERROR),
1535 message: "undefined variable 'A'".to_string(),
1536 source: Some("disk".to_string()),
1537 ..Default::default()
1538 },
1539 lsp::Diagnostic {
1540 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1541 severity: Some(DiagnosticSeverity::ERROR),
1542 message: "undefined variable 'BB'".to_string(),
1543 source: Some("disk".to_string()),
1544 ..Default::default()
1545 },
1546 lsp::Diagnostic {
1547 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1548 severity: Some(DiagnosticSeverity::ERROR),
1549 source: Some("disk".to_string()),
1550 message: "undefined variable 'CCC'".to_string(),
1551 ..Default::default()
1552 },
1553 ],
1554 });
1555
1556 // The diagnostics have moved down since they were created.
1557 cx.executor().run_until_parked();
1558 buffer.update(cx, |buffer, _| {
1559 assert_eq!(
1560 buffer
1561 .snapshot()
1562 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1563 .collect::<Vec<_>>(),
1564 &[
1565 DiagnosticEntry {
1566 range: Point::new(3, 9)..Point::new(3, 11),
1567 diagnostic: Diagnostic {
1568 source: Some("disk".into()),
1569 severity: DiagnosticSeverity::ERROR,
1570 message: "undefined variable 'BB'".to_string(),
1571 is_disk_based: true,
1572 group_id: 1,
1573 is_primary: true,
1574 ..Default::default()
1575 },
1576 },
1577 DiagnosticEntry {
1578 range: Point::new(4, 9)..Point::new(4, 12),
1579 diagnostic: Diagnostic {
1580 source: Some("disk".into()),
1581 severity: DiagnosticSeverity::ERROR,
1582 message: "undefined variable 'CCC'".to_string(),
1583 is_disk_based: true,
1584 group_id: 2,
1585 is_primary: true,
1586 ..Default::default()
1587 }
1588 }
1589 ]
1590 );
1591 assert_eq!(
1592 chunks_with_diagnostics(buffer, 0..buffer.len()),
1593 [
1594 ("\n\nfn a() { ".to_string(), None),
1595 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1596 (" }\nfn b() { ".to_string(), None),
1597 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1598 (" }\nfn c() { ".to_string(), None),
1599 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1600 (" }\n".to_string(), None),
1601 ]
1602 );
1603 assert_eq!(
1604 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1605 [
1606 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1607 (" }\nfn c() { ".to_string(), None),
1608 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1609 ]
1610 );
1611 });
1612
1613 // Ensure overlapping diagnostics are highlighted correctly.
1614 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1615 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1616 version: Some(open_notification.text_document.version),
1617 diagnostics: vec![
1618 lsp::Diagnostic {
1619 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1620 severity: Some(DiagnosticSeverity::ERROR),
1621 message: "undefined variable 'A'".to_string(),
1622 source: Some("disk".to_string()),
1623 ..Default::default()
1624 },
1625 lsp::Diagnostic {
1626 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1627 severity: Some(DiagnosticSeverity::WARNING),
1628 message: "unreachable statement".to_string(),
1629 source: Some("disk".to_string()),
1630 ..Default::default()
1631 },
1632 ],
1633 });
1634
1635 cx.executor().run_until_parked();
1636 buffer.update(cx, |buffer, _| {
1637 assert_eq!(
1638 buffer
1639 .snapshot()
1640 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1641 .collect::<Vec<_>>(),
1642 &[
1643 DiagnosticEntry {
1644 range: Point::new(2, 9)..Point::new(2, 12),
1645 diagnostic: Diagnostic {
1646 source: Some("disk".into()),
1647 severity: DiagnosticSeverity::WARNING,
1648 message: "unreachable statement".to_string(),
1649 is_disk_based: true,
1650 group_id: 4,
1651 is_primary: true,
1652 ..Default::default()
1653 }
1654 },
1655 DiagnosticEntry {
1656 range: Point::new(2, 9)..Point::new(2, 10),
1657 diagnostic: Diagnostic {
1658 source: Some("disk".into()),
1659 severity: DiagnosticSeverity::ERROR,
1660 message: "undefined variable 'A'".to_string(),
1661 is_disk_based: true,
1662 group_id: 3,
1663 is_primary: true,
1664 ..Default::default()
1665 },
1666 }
1667 ]
1668 );
1669 assert_eq!(
1670 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1671 [
1672 ("fn a() { ".to_string(), None),
1673 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1674 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1675 ("\n".to_string(), None),
1676 ]
1677 );
1678 assert_eq!(
1679 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1680 [
1681 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1682 ("\n".to_string(), None),
1683 ]
1684 );
1685 });
1686
1687 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1688 // changes since the last save.
1689 buffer.update(cx, |buffer, cx| {
1690 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1691 buffer.edit(
1692 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1693 None,
1694 cx,
1695 );
1696 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1697 });
1698 let change_notification_2 = fake_server
1699 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1700 .await;
1701 assert!(
1702 change_notification_2.text_document.version > change_notification_1.text_document.version
1703 );
1704
// Diagnostics reported out of order should still be stored and queried in sorted order.
1706 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1707 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1708 version: Some(change_notification_2.text_document.version),
1709 diagnostics: vec![
1710 lsp::Diagnostic {
1711 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1712 severity: Some(DiagnosticSeverity::ERROR),
1713 message: "undefined variable 'BB'".to_string(),
1714 source: Some("disk".to_string()),
1715 ..Default::default()
1716 },
1717 lsp::Diagnostic {
1718 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1719 severity: Some(DiagnosticSeverity::WARNING),
1720 message: "undefined variable 'A'".to_string(),
1721 source: Some("disk".to_string()),
1722 ..Default::default()
1723 },
1724 ],
1725 });
1726
1727 cx.executor().run_until_parked();
1728 buffer.update(cx, |buffer, _| {
1729 assert_eq!(
1730 buffer
1731 .snapshot()
1732 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1733 .collect::<Vec<_>>(),
1734 &[
1735 DiagnosticEntry {
1736 range: Point::new(2, 21)..Point::new(2, 22),
1737 diagnostic: Diagnostic {
1738 source: Some("disk".into()),
1739 severity: DiagnosticSeverity::WARNING,
1740 message: "undefined variable 'A'".to_string(),
1741 is_disk_based: true,
1742 group_id: 6,
1743 is_primary: true,
1744 ..Default::default()
1745 }
1746 },
1747 DiagnosticEntry {
1748 range: Point::new(3, 9)..Point::new(3, 14),
1749 diagnostic: Diagnostic {
1750 source: Some("disk".into()),
1751 severity: DiagnosticSeverity::ERROR,
1752 message: "undefined variable 'BB'".to_string(),
1753 is_disk_based: true,
1754 group_id: 5,
1755 is_primary: true,
1756 ..Default::default()
1757 },
1758 }
1759 ]
1760 );
1761 });
1762}
1763
1764#[gpui::test]
1765async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1766 init_test(cx);
1767
1768 let text = concat!(
1769 "let one = ;\n", //
1770 "let two = \n",
1771 "let three = 3;\n",
1772 );
1773
1774 let fs = FakeFs::new(cx.executor());
1775 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1776
1777 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1778 let buffer = project
1779 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1780 .await
1781 .unwrap();
1782
1783 project.update(cx, |project, cx| {
1784 project
1785 .update_buffer_diagnostics(
1786 &buffer,
1787 LanguageServerId(0),
1788 None,
1789 vec![
1790 DiagnosticEntry {
1791 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1792 diagnostic: Diagnostic {
1793 severity: DiagnosticSeverity::ERROR,
1794 message: "syntax error 1".to_string(),
1795 ..Default::default()
1796 },
1797 },
1798 DiagnosticEntry {
1799 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1800 diagnostic: Diagnostic {
1801 severity: DiagnosticSeverity::ERROR,
1802 message: "syntax error 2".to_string(),
1803 ..Default::default()
1804 },
1805 },
1806 ],
1807 cx,
1808 )
1809 .unwrap();
1810 });
1811
1812 // An empty range is extended forward to include the following character.
1813 // At the end of a line, an empty range is extended backward to include
1814 // the preceding character.
1815 buffer.update(cx, |buffer, _| {
1816 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1817 assert_eq!(
1818 chunks
1819 .iter()
1820 .map(|(s, d)| (s.as_str(), *d))
1821 .collect::<Vec<_>>(),
1822 &[
1823 ("let one = ", None),
1824 (";", Some(DiagnosticSeverity::ERROR)),
1825 ("\nlet two =", None),
1826 (" ", Some(DiagnosticSeverity::ERROR)),
1827 ("\nlet three = 3;\n", None)
1828 ]
1829 );
1830 });
1831}
1832
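// Diagnostics published by different language servers for the same file should be
// counted separately in the project's diagnostic summary.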
1833#[gpui::test]
1834async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1835 init_test(cx);
1836
1837 let fs = FakeFs::new(cx.executor());
1838 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1839 .await;
1840
1841 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1842
1843 project.update(cx, |project, cx| {
1844 project
1845 .update_diagnostic_entries(
1846 LanguageServerId(0),
1847 Path::new("/dir/a.rs").to_owned(),
1848 None,
1849 vec![DiagnosticEntry {
1850 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1851 diagnostic: Diagnostic {
1852 severity: DiagnosticSeverity::ERROR,
1853 is_primary: true,
1854 message: "syntax error a1".to_string(),
1855 ..Default::default()
1856 },
1857 }],
1858 cx,
1859 )
1860 .unwrap();
1861 project
1862 .update_diagnostic_entries(
1863 LanguageServerId(1),
1864 Path::new("/dir/a.rs").to_owned(),
1865 None,
1866 vec![DiagnosticEntry {
1867 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1868 diagnostic: Diagnostic {
1869 severity: DiagnosticSeverity::ERROR,
1870 is_primary: true,
1871 message: "syntax error b1".to_string(),
1872 ..Default::default()
1873 },
1874 }],
1875 cx,
1876 )
1877 .unwrap();
1878
1879 assert_eq!(
1880 project.diagnostic_summary(false, cx),
1881 DiagnosticSummary {
1882 error_count: 2,
1883 warning_count: 0,
1884 }
1885 );
1886 });
1887}
1888
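// `edits_from_lsp` should interpret edits against the document version the server saw
// and map them through any buffer edits made since that version.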
1889#[gpui::test]
1890async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1891 init_test(cx);
1892
1893 let mut language = Language::new(
1894 LanguageConfig {
1895 name: "Rust".into(),
1896 matcher: LanguageMatcher {
1897 path_suffixes: vec!["rs".to_string()],
1898 ..Default::default()
1899 },
1900 ..Default::default()
1901 },
1902 Some(tree_sitter_rust::language()),
1903 );
1904 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1905
1906 let text = "
1907 fn a() {
1908 f1();
1909 }
1910 fn b() {
1911 f2();
1912 }
1913 fn c() {
1914 f3();
1915 }
1916 "
1917 .unindent();
1918
1919 let fs = FakeFs::new(cx.executor());
1920 fs.insert_tree(
1921 "/dir",
1922 json!({
1923 "a.rs": text.clone(),
1924 }),
1925 )
1926 .await;
1927
1928 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1929 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1930 let buffer = project
1931 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1932 .await
1933 .unwrap();
1934
1935 let mut fake_server = fake_servers.next().await.unwrap();
1936 let lsp_document_version = fake_server
1937 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1938 .await
1939 .text_document
1940 .version;
1941
1942 // Simulate editing the buffer after the language server computes some edits.
1943 buffer.update(cx, |buffer, cx| {
1944 buffer.edit(
1945 [(
1946 Point::new(0, 0)..Point::new(0, 0),
1947 "// above first function\n",
1948 )],
1949 None,
1950 cx,
1951 );
1952 buffer.edit(
1953 [(
1954 Point::new(2, 0)..Point::new(2, 0),
1955 " // inside first function\n",
1956 )],
1957 None,
1958 cx,
1959 );
1960 buffer.edit(
1961 [(
1962 Point::new(6, 4)..Point::new(6, 4),
1963 "// inside second function ",
1964 )],
1965 None,
1966 cx,
1967 );
1968
1969 assert_eq!(
1970 buffer.text(),
1971 "
1972 // above first function
1973 fn a() {
1974 // inside first function
1975 f1();
1976 }
1977 fn b() {
1978 // inside second function f2();
1979 }
1980 fn c() {
1981 f3();
1982 }
1983 "
1984 .unindent()
1985 );
1986 });
1987
1988 let edits = project
1989 .update(cx, |project, cx| {
1990 project.edits_from_lsp(
1991 &buffer,
1992 vec![
1993 // replace body of first function
1994 lsp::TextEdit {
1995 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1996 new_text: "
1997 fn a() {
1998 f10();
1999 }
2000 "
2001 .unindent(),
2002 },
2003 // edit inside second function
2004 lsp::TextEdit {
2005 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2006 new_text: "00".into(),
2007 },
2008 // edit inside third function via two distinct edits
2009 lsp::TextEdit {
2010 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2011 new_text: "4000".into(),
2012 },
2013 lsp::TextEdit {
2014 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2015 new_text: "".into(),
2016 },
2017 ],
2018 LanguageServerId(0),
2019 Some(lsp_document_version),
2020 cx,
2021 )
2022 })
2023 .await
2024 .unwrap();
2025
2026 buffer.update(cx, |buffer, cx| {
2027 for (range, new_text) in edits {
2028 buffer.edit([(range, new_text)], None, cx);
2029 }
2030 assert_eq!(
2031 buffer.text(),
2032 "
2033 // above first function
2034 fn a() {
2035 // inside first function
2036 f10();
2037 }
2038 fn b() {
2039 // inside second function f200();
2040 }
2041 fn c() {
2042 f4000();
2043 }
2044 "
2045 .unindent()
2046 );
2047 });
2048}
2049
2050#[gpui::test]
2051async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2052 init_test(cx);
2053
2054 let text = "
2055 use a::b;
2056 use a::c;
2057
2058 fn f() {
2059 b();
2060 c();
2061 }
2062 "
2063 .unindent();
2064
2065 let fs = FakeFs::new(cx.executor());
2066 fs.insert_tree(
2067 "/dir",
2068 json!({
2069 "a.rs": text.clone(),
2070 }),
2071 )
2072 .await;
2073
2074 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2075 let buffer = project
2076 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2077 .await
2078 .unwrap();
2079
2080 // Simulate the language server sending us a small edit in the form of a very large diff.
2081 // Rust-analyzer does this when performing a merge-imports code action.
2082 let edits = project
2083 .update(cx, |project, cx| {
2084 project.edits_from_lsp(
2085 &buffer,
2086 [
2087 // Replace the first use statement without editing the semicolon.
2088 lsp::TextEdit {
2089 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2090 new_text: "a::{b, c}".into(),
2091 },
2092 // Reinsert the remainder of the file between the semicolon and the final
2093 // newline of the file.
2094 lsp::TextEdit {
2095 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2096 new_text: "\n\n".into(),
2097 },
2098 lsp::TextEdit {
2099 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2100 new_text: "
2101 fn f() {
2102 b();
2103 c();
2104 }"
2105 .unindent(),
2106 },
2107 // Delete everything after the first newline of the file.
2108 lsp::TextEdit {
2109 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2110 new_text: "".into(),
2111 },
2112 ],
2113 LanguageServerId(0),
2114 None,
2115 cx,
2116 )
2117 })
2118 .await
2119 .unwrap();
2120
2121 buffer.update(cx, |buffer, cx| {
2122 let edits = edits
2123 .into_iter()
2124 .map(|(range, text)| {
2125 (
2126 range.start.to_point(buffer)..range.end.to_point(buffer),
2127 text,
2128 )
2129 })
2130 .collect::<Vec<_>>();
2131
2132 assert_eq!(
2133 edits,
2134 [
2135 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2136 (Point::new(1, 0)..Point::new(2, 0), "".into())
2137 ]
2138 );
2139
2140 for (range, new_text) in edits {
2141 buffer.edit([(range, new_text)], None, cx);
2142 }
2143 assert_eq!(
2144 buffer.text(),
2145 "
2146 use a::{b, c};
2147
2148 fn f() {
2149 b();
2150 c();
2151 }
2152 "
2153 .unindent()
2154 );
2155 });
2156}
2157
2158#[gpui::test]
2159async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2160 init_test(cx);
2161
2162 let text = "
2163 use a::b;
2164 use a::c;
2165
2166 fn f() {
2167 b();
2168 c();
2169 }
2170 "
2171 .unindent();
2172
2173 let fs = FakeFs::new(cx.executor());
2174 fs.insert_tree(
2175 "/dir",
2176 json!({
2177 "a.rs": text.clone(),
2178 }),
2179 )
2180 .await;
2181
2182 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2183 let buffer = project
2184 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2185 .await
2186 .unwrap();
2187
    // Simulate the language server sending us edits in an arbitrary order,
    // with ranges that are sometimes inverted or point to invalid locations.
2190 let edits = project
2191 .update(cx, |project, cx| {
2192 project.edits_from_lsp(
2193 &buffer,
2194 [
2195 lsp::TextEdit {
2196 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2197 new_text: "\n\n".into(),
2198 },
2199 lsp::TextEdit {
2200 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2201 new_text: "a::{b, c}".into(),
2202 },
2203 lsp::TextEdit {
2204 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2205 new_text: "".into(),
2206 },
2207 lsp::TextEdit {
2208 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2209 new_text: "
2210 fn f() {
2211 b();
2212 c();
2213 }"
2214 .unindent(),
2215 },
2216 ],
2217 LanguageServerId(0),
2218 None,
2219 cx,
2220 )
2221 })
2222 .await
2223 .unwrap();
2224
2225 buffer.update(cx, |buffer, cx| {
2226 let edits = edits
2227 .into_iter()
2228 .map(|(range, text)| {
2229 (
2230 range.start.to_point(buffer)..range.end.to_point(buffer),
2231 text,
2232 )
2233 })
2234 .collect::<Vec<_>>();
2235
2236 assert_eq!(
2237 edits,
2238 [
2239 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2240 (Point::new(1, 0)..Point::new(2, 0), "".into())
2241 ]
2242 );
2243
2244 for (range, new_text) in edits {
2245 buffer.edit([(range, new_text)], None, cx);
2246 }
2247 assert_eq!(
2248 buffer.text(),
2249 "
2250 use a::{b, c};
2251
2252 fn f() {
2253 b();
2254 c();
2255 }
2256 "
2257 .unindent()
2258 );
2259 });
2260}
2261
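// Collects the chunks of `buffer` within `range` along with their diagnostic
// severities, merging adjacent chunks that share the same severity.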
2262fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2263 buffer: &Buffer,
2264 range: Range<T>,
2265) -> Vec<(String, Option<DiagnosticSeverity>)> {
2266 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2267 for chunk in buffer.snapshot().chunks(range, true) {
2268 if chunks.last().map_or(false, |prev_chunk| {
2269 prev_chunk.1 == chunk.diagnostic_severity
2270 }) {
2271 chunks.last_mut().unwrap().0.push_str(chunk.text);
2272 } else {
2273 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2274 }
2275 }
2276 chunks
2277}
2278
2279#[gpui::test(iterations = 10)]
2280async fn test_definition(cx: &mut gpui::TestAppContext) {
2281 init_test(cx);
2282
2283 let mut language = Language::new(
2284 LanguageConfig {
2285 name: "Rust".into(),
2286 matcher: LanguageMatcher {
2287 path_suffixes: vec!["rs".to_string()],
2288 ..Default::default()
2289 },
2290 ..Default::default()
2291 },
2292 Some(tree_sitter_rust::language()),
2293 );
2294 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2295
2296 let fs = FakeFs::new(cx.executor());
2297 fs.insert_tree(
2298 "/dir",
2299 json!({
2300 "a.rs": "const fn a() { A }",
2301 "b.rs": "const y: i32 = crate::a()",
2302 }),
2303 )
2304 .await;
2305
2306 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2307 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2308
2309 let buffer = project
2310 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2311 .await
2312 .unwrap();
2313
2314 let fake_server = fake_servers.next().await.unwrap();
2315 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2316 let params = params.text_document_position_params;
2317 assert_eq!(
2318 params.text_document.uri.to_file_path().unwrap(),
2319 Path::new("/dir/b.rs"),
2320 );
2321 assert_eq!(params.position, lsp::Position::new(0, 22));
2322
2323 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2324 lsp::Location::new(
2325 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2326 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2327 ),
2328 )))
2329 });
2330
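    // Request the definition at offset 22, which falls on the `a` in `crate::a()`.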
2331 let mut definitions = project
2332 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2333 .await
2334 .unwrap();
2335
2336 // Assert no new language server started
2337 cx.executor().run_until_parked();
2338 assert!(fake_servers.try_next().is_err());
2339
2340 assert_eq!(definitions.len(), 1);
2341 let definition = definitions.pop().unwrap();
2342 cx.update(|cx| {
2343 let target_buffer = definition.target.buffer.read(cx);
2344 assert_eq!(
2345 target_buffer
2346 .file()
2347 .unwrap()
2348 .as_local()
2349 .unwrap()
2350 .abs_path(cx),
2351 Path::new("/dir/a.rs"),
2352 );
2353 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2354 assert_eq!(
2355 list_worktrees(&project, cx),
2356 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2357 );
2358
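        // Dropping the only reference to the definition should let go of the
        // worktree that was added for the target file.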
2359 drop(definition);
2360 });
2361 cx.update(|cx| {
2362 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2363 });
2364
2365 fn list_worktrees<'a>(
2366 project: &'a Model<Project>,
2367 cx: &'a AppContext,
2368 ) -> Vec<(&'a Path, bool)> {
2369 project
2370 .read(cx)
2371 .worktrees()
2372 .map(|worktree| {
2373 let worktree = worktree.read(cx);
2374 (
2375 worktree.as_local().unwrap().abs_path().as_ref(),
2376 worktree.is_visible(),
2377 )
2378 })
2379 .collect::<Vec<_>>()
2380 }
2381}
2382
2383#[gpui::test]
2384async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2385 init_test(cx);
2386
2387 let mut language = Language::new(
2388 LanguageConfig {
2389 name: "TypeScript".into(),
2390 matcher: LanguageMatcher {
2391 path_suffixes: vec!["ts".to_string()],
2392 ..Default::default()
2393 },
2394 ..Default::default()
2395 },
2396 Some(tree_sitter_typescript::language_typescript()),
2397 );
2398 let mut fake_language_servers = language
2399 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2400 capabilities: lsp::ServerCapabilities {
2401 completion_provider: Some(lsp::CompletionOptions {
2402 trigger_characters: Some(vec![":".to_string()]),
2403 ..Default::default()
2404 }),
2405 ..Default::default()
2406 },
2407 ..Default::default()
2408 }))
2409 .await;
2410
2411 let fs = FakeFs::new(cx.executor());
2412 fs.insert_tree(
2413 "/dir",
2414 json!({
2415 "a.ts": "",
2416 }),
2417 )
2418 .await;
2419
2420 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2421 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2422 let buffer = project
2423 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2424 .await
2425 .unwrap();
2426
2427 let fake_server = fake_language_servers.next().await.unwrap();
2428
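    // The completion provides an `insert_text` but no edit range, so the replaced
    // range should fall back to the word prefix (`fqn`) preceding the cursor.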
2429 let text = "let a = b.fqn";
2430 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2431 let completions = project.update(cx, |project, cx| {
2432 project.completions(&buffer, text.len(), cx)
2433 });
2434
2435 fake_server
2436 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2437 Ok(Some(lsp::CompletionResponse::Array(vec![
2438 lsp::CompletionItem {
2439 label: "fullyQualifiedName?".into(),
2440 insert_text: Some("fullyQualifiedName".into()),
2441 ..Default::default()
2442 },
2443 ])))
2444 })
2445 .next()
2446 .await;
2447 let completions = completions.await.unwrap();
2448 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2449 assert_eq!(completions.len(), 1);
2450 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2451 assert_eq!(
2452 completions[0].old_range.to_offset(&snapshot),
2453 text.len() - 3..text.len()
2454 );
2455
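    // With the cursor just before the closing quote, the replaced range should cover
    // the partial word (`cmp`) inside the string literal.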
2456 let text = "let a = \"atoms/cmp\"";
2457 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2458 let completions = project.update(cx, |project, cx| {
2459 project.completions(&buffer, text.len() - 1, cx)
2460 });
2461
2462 fake_server
2463 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2464 Ok(Some(lsp::CompletionResponse::Array(vec![
2465 lsp::CompletionItem {
2466 label: "component".into(),
2467 ..Default::default()
2468 },
2469 ])))
2470 })
2471 .next()
2472 .await;
2473 let completions = completions.await.unwrap();
2474 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2475 assert_eq!(completions.len(), 1);
2476 assert_eq!(completions[0].new_text, "component");
2477 assert_eq!(
2478 completions[0].old_range.to_offset(&snapshot),
2479 text.len() - 4..text.len() - 1
2480 );
2481}
2482
2483#[gpui::test]
2484async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2485 init_test(cx);
2486
2487 let mut language = Language::new(
2488 LanguageConfig {
2489 name: "TypeScript".into(),
2490 matcher: LanguageMatcher {
2491 path_suffixes: vec!["ts".to_string()],
2492 ..Default::default()
2493 },
2494 ..Default::default()
2495 },
2496 Some(tree_sitter_typescript::language_typescript()),
2497 );
2498 let mut fake_language_servers = language
2499 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2500 capabilities: lsp::ServerCapabilities {
2501 completion_provider: Some(lsp::CompletionOptions {
2502 trigger_characters: Some(vec![":".to_string()]),
2503 ..Default::default()
2504 }),
2505 ..Default::default()
2506 },
2507 ..Default::default()
2508 }))
2509 .await;
2510
2511 let fs = FakeFs::new(cx.executor());
2512 fs.insert_tree(
2513 "/dir",
2514 json!({
2515 "a.ts": "",
2516 }),
2517 )
2518 .await;
2519
2520 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2521 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2522 let buffer = project
2523 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2524 .await
2525 .unwrap();
2526
2527 let fake_server = fake_language_servers.next().await.unwrap();
2528
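    // The completion's `insert_text` contains carriage returns, which should be
    // normalized to `\n` when the completion is applied.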
2529 let text = "let a = b.fqn";
2530 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2531 let completions = project.update(cx, |project, cx| {
2532 project.completions(&buffer, text.len(), cx)
2533 });
2534
2535 fake_server
2536 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2537 Ok(Some(lsp::CompletionResponse::Array(vec![
2538 lsp::CompletionItem {
2539 label: "fullyQualifiedName?".into(),
2540 insert_text: Some("fully\rQualified\r\nName".into()),
2541 ..Default::default()
2542 },
2543 ])))
2544 })
2545 .next()
2546 .await;
2547 let completions = completions.await.unwrap();
2548 assert_eq!(completions.len(), 1);
2549 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2550}
2551
2552#[gpui::test(iterations = 10)]
2553async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2554 init_test(cx);
2555
2556 let mut language = Language::new(
2557 LanguageConfig {
2558 name: "TypeScript".into(),
2559 matcher: LanguageMatcher {
2560 path_suffixes: vec!["ts".to_string()],
2561 ..Default::default()
2562 },
2563 ..Default::default()
2564 },
2565 None,
2566 );
2567 let mut fake_language_servers = language
2568 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2569 capabilities: lsp::ServerCapabilities {
2570 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2571 lsp::CodeActionOptions {
2572 resolve_provider: Some(true),
2573 ..lsp::CodeActionOptions::default()
2574 },
2575 )),
2576 ..lsp::ServerCapabilities::default()
2577 },
2578 ..FakeLspAdapter::default()
2579 }))
2580 .await;
2581
2582 let fs = FakeFs::new(cx.executor());
2583 fs.insert_tree(
2584 "/dir",
2585 json!({
2586 "a.ts": "a",
2587 }),
2588 )
2589 .await;
2590
2591 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2592 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2593 let buffer = project
2594 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2595 .await
2596 .unwrap();
2597
2598 let fake_server = fake_language_servers.next().await.unwrap();
2599
    // The language server returns code actions that contain commands, not edits.
2601 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2602 fake_server
2603 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2604 Ok(Some(vec![
2605 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2606 title: "The code action".into(),
2607 data: Some(serde_json::json!({
2608 "command": "_the/command",
2609 })),
2610 ..lsp::CodeAction::default()
2611 }),
2612 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2613 title: "two".into(),
2614 ..lsp::CodeAction::default()
2615 }),
2616 ]))
2617 })
2618 .next()
2619 .await;
2620
2621 let action = actions.await.unwrap()[0].clone();
2622 let apply = project.update(cx, |project, cx| {
2623 project.apply_code_action(buffer.clone(), action, true, cx)
2624 });
2625
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2628 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2629 |mut action, _| async move {
2630 if action.data.is_some() {
2631 action.command = Some(lsp::Command {
2632 title: "The command".into(),
2633 command: "_the/command".into(),
2634 arguments: Some(vec![json!("the-argument")]),
2635 });
2636 }
2637 Ok(action)
2638 },
2639 );
2640
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2643 fake_server
2644 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2645 let fake = fake_server.clone();
2646 move |params, _| {
2647 assert_eq!(params.command, "_the/command");
2648 let fake = fake.clone();
2649 async move {
2650 fake.server
2651 .request::<lsp::request::ApplyWorkspaceEdit>(
2652 lsp::ApplyWorkspaceEditParams {
2653 label: None,
2654 edit: lsp::WorkspaceEdit {
2655 changes: Some(
2656 [(
2657 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2658 vec![lsp::TextEdit {
2659 range: lsp::Range::new(
2660 lsp::Position::new(0, 0),
2661 lsp::Position::new(0, 0),
2662 ),
2663 new_text: "X".into(),
2664 }],
2665 )]
2666 .into_iter()
2667 .collect(),
2668 ),
2669 ..Default::default()
2670 },
2671 },
2672 )
2673 .await
2674 .unwrap();
2675 Ok(Some(json!(null)))
2676 }
2677 }
2678 })
2679 .next()
2680 .await;
2681
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2684 let transaction = apply.await.unwrap();
2685 assert!(transaction.0.contains_key(&buffer));
2686 buffer.update(cx, |buffer, cx| {
2687 assert_eq!(buffer.text(), "Xa");
2688 buffer.undo(cx);
2689 assert_eq!(buffer.text(), "a");
2690 });
2691}
2692
2693#[gpui::test(iterations = 10)]
2694async fn test_save_file(cx: &mut gpui::TestAppContext) {
2695 init_test(cx);
2696
2697 let fs = FakeFs::new(cx.executor());
2698 fs.insert_tree(
2699 "/dir",
2700 json!({
2701 "file1": "the old contents",
2702 }),
2703 )
2704 .await;
2705
2706 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2707 let buffer = project
2708 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2709 .await
2710 .unwrap();
2711 buffer.update(cx, |buffer, cx| {
2712 assert_eq!(buffer.text(), "the old contents");
2713 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2714 });
2715
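    // Save the buffer and verify that its full contents are written to disk.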
2716 project
2717 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2718 .await
2719 .unwrap();
2720
2721 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2722 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2723}
2724
2725#[gpui::test(iterations = 30)]
2726async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2727 init_test(cx);
2728
2729 let fs = FakeFs::new(cx.executor().clone());
2730 fs.insert_tree(
2731 "/dir",
2732 json!({
2733 "file1": "the original contents",
2734 }),
2735 )
2736 .await;
2737
2738 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2739 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2740 let buffer = project
2741 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2742 .await
2743 .unwrap();
2744
2745 // Simulate buffer diffs being slow, so that they don't complete before
2746 // the next file change occurs.
2747 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2748
2749 // Change the buffer's file on disk, and then wait for the file change
2750 // to be detected by the worktree, so that the buffer starts reloading.
2751 fs.save(
2752 "/dir/file1".as_ref(),
2753 &"the first contents".into(),
2754 Default::default(),
2755 )
2756 .await
2757 .unwrap();
2758 worktree.next_event(cx);
2759
2760 // Change the buffer's file again. Depending on the random seed, the
2761 // previous file change may still be in progress.
2762 fs.save(
2763 "/dir/file1".as_ref(),
2764 &"the second contents".into(),
2765 Default::default(),
2766 )
2767 .await
2768 .unwrap();
2769 worktree.next_event(cx);
2770
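    // Wait for the reloads to settle. The buffer should match whatever ended up on
    // disk, without being dirty or in conflict.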
2771 cx.executor().run_until_parked();
2772 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2773 buffer.read_with(cx, |buffer, _| {
2774 assert_eq!(buffer.text(), on_disk_text);
2775 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2777 });
2778}
2779
2780#[gpui::test(iterations = 30)]
2781async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2782 init_test(cx);
2783
2784 let fs = FakeFs::new(cx.executor().clone());
2785 fs.insert_tree(
2786 "/dir",
2787 json!({
2788 "file1": "the original contents",
2789 }),
2790 )
2791 .await;
2792
2793 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2794 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2795 let buffer = project
2796 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2797 .await
2798 .unwrap();
2799
2800 // Simulate buffer diffs being slow, so that they don't complete before
2801 // the next file change occurs.
2802 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2803
2804 // Change the buffer's file on disk, and then wait for the file change
2805 // to be detected by the worktree, so that the buffer starts reloading.
2806 fs.save(
2807 "/dir/file1".as_ref(),
2808 &"the first contents".into(),
2809 Default::default(),
2810 )
2811 .await
2812 .unwrap();
2813 worktree.next_event(cx);
2814
2815 cx.executor()
2816 .spawn(cx.executor().simulate_random_delay())
2817 .await;
2818
2819 // Perform a noop edit, causing the buffer's version to increase.
2820 buffer.update(cx, |buffer, cx| {
2821 buffer.edit([(0..0, " ")], None, cx);
2822 buffer.undo(cx);
2823 });
2824
2825 cx.executor().run_until_parked();
2826 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2827 buffer.read_with(cx, |buffer, _| {
2828 let buffer_text = buffer.text();
2829 if buffer_text == on_disk_text {
2830 assert!(
2831 !buffer.is_dirty() && !buffer.has_conflict(),
2832 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2833 );
2834 }
2835 // If the file change occurred while the buffer was processing the first
2836 // change, the buffer will be in a conflicting state.
2837 else {
2838 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2840 }
2841 });
2842}
2843
2844#[gpui::test]
2845async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2846 init_test(cx);
2847
2848 let fs = FakeFs::new(cx.executor());
2849 fs.insert_tree(
2850 "/dir",
2851 json!({
2852 "file1": "the old contents",
2853 }),
2854 )
2855 .await;
2856
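    // Create a worktree rooted at a single file rather than at a directory.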
2857 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2858 let buffer = project
2859 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2860 .await
2861 .unwrap();
2862 buffer.update(cx, |buffer, cx| {
2863 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2864 });
2865
2866 project
2867 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2868 .await
2869 .unwrap();
2870
2871 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2872 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2873}
2874
2875#[gpui::test]
2876async fn test_save_as(cx: &mut gpui::TestAppContext) {
2877 init_test(cx);
2878
2879 let fs = FakeFs::new(cx.executor());
2880 fs.insert_tree("/dir", json!({})).await;
2881
2882 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2883
2884 let languages = project.update(cx, |project, _| project.languages().clone());
2885 languages.register_native_grammars([("rust", tree_sitter_rust::language())]);
2886 languages.register_test_language(LanguageConfig {
2887 name: "Rust".into(),
2888 grammar: Some("rust".into()),
2889 matcher: LanguageMatcher {
2890 path_suffixes: vec!["rs".into()],
2891 ..Default::default()
2892 },
2893 ..Default::default()
2894 });
2895
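    // Create an untitled buffer. Until it is saved with a path, it uses the Plain Text language.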
2896 let buffer = project.update(cx, |project, cx| {
2897 project.create_buffer("", None, cx).unwrap()
2898 });
2899 buffer.update(cx, |buffer, cx| {
2900 buffer.edit([(0..0, "abc")], None, cx);
2901 assert!(buffer.is_dirty());
2902 assert!(!buffer.has_conflict());
2903 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2904 });
2905 project
2906 .update(cx, |project, cx| {
2907 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2908 })
2909 .await
2910 .unwrap();
2911 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2912
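    // After being saved with an `.rs` path, the buffer picks up the Rust language and is no longer dirty.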
2913 cx.executor().run_until_parked();
2914 buffer.update(cx, |buffer, cx| {
2915 assert_eq!(
2916 buffer.file().unwrap().full_path(cx),
2917 Path::new("dir/file1.rs")
2918 );
2919 assert!(!buffer.is_dirty());
2920 assert!(!buffer.has_conflict());
2921 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2922 });
2923
2924 let opened_buffer = project
2925 .update(cx, |project, cx| {
2926 project.open_local_buffer("/dir/file1.rs", cx)
2927 })
2928 .await
2929 .unwrap();
2930 assert_eq!(opened_buffer, buffer);
2931}
2932
2933#[gpui::test(retries = 5)]
2934async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2935 init_test(cx);
2936 cx.executor().allow_parking();
2937
2938 let dir = temp_tree(json!({
2939 "a": {
2940 "file1": "",
2941 "file2": "",
2942 "file3": "",
2943 },
2944 "b": {
2945 "c": {
2946 "file4": "",
2947 "file5": "",
2948 }
2949 }
2950 }));
2951
2952 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2953 let rpc = project.update(cx, |p, _| p.client.clone());
2954
2955 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2956 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2957 async move { buffer.await.unwrap() }
2958 };
2959 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2960 project.update(cx, |project, cx| {
2961 let tree = project.worktrees().next().unwrap();
2962 tree.read(cx)
2963 .entry_for_path(path)
2964 .unwrap_or_else(|| panic!("no entry for path {}", path))
2965 .id
2966 })
2967 };
2968
2969 let buffer2 = buffer_for_path("a/file2", cx).await;
2970 let buffer3 = buffer_for_path("a/file3", cx).await;
2971 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2972 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2973
2974 let file2_id = id_for_path("a/file2", cx);
2975 let file3_id = id_for_path("a/file3", cx);
2976 let file4_id = id_for_path("b/c/file4", cx);
2977
2978 // Create a remote copy of this worktree.
2979 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2980
2981 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2982
2983 let updates = Arc::new(Mutex::new(Vec::new()));
2984 tree.update(cx, |tree, cx| {
2985 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2986 let updates = updates.clone();
2987 move |update| {
2988 updates.lock().push(update);
2989 async { true }
2990 }
2991 });
2992 });
2993
2994 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2995
2996 cx.executor().run_until_parked();
2997
2998 cx.update(|cx| {
2999 assert!(!buffer2.read(cx).is_dirty());
3000 assert!(!buffer3.read(cx).is_dirty());
3001 assert!(!buffer4.read(cx).is_dirty());
3002 assert!(!buffer5.read(cx).is_dirty());
3003 });
3004
3005 // Rename and delete files and directories.
3006 tree.flush_fs_events(cx).await;
3007 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3008 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3009 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3010 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3011 tree.flush_fs_events(cx).await;
3012
3013 let expected_paths = vec![
3014 "a",
3015 "a/file1",
3016 "a/file2.new",
3017 "b",
3018 "d",
3019 "d/file3",
3020 "d/file4",
3021 ];
3022
3023 cx.update(|app| {
3024 assert_eq!(
3025 tree.read(app)
3026 .paths()
3027 .map(|p| p.to_str().unwrap())
3028 .collect::<Vec<_>>(),
3029 expected_paths
3030 );
3031 });
3032
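    // The entries kept their ids across the renames and moves.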
3033 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
3034 assert_eq!(id_for_path("d/file3", cx), file3_id);
3035 assert_eq!(id_for_path("d/file4", cx), file4_id);
3036
3037 cx.update(|cx| {
3038 assert_eq!(
3039 buffer2.read(cx).file().unwrap().path().as_ref(),
3040 Path::new("a/file2.new")
3041 );
3042 assert_eq!(
3043 buffer3.read(cx).file().unwrap().path().as_ref(),
3044 Path::new("d/file3")
3045 );
3046 assert_eq!(
3047 buffer4.read(cx).file().unwrap().path().as_ref(),
3048 Path::new("d/file4")
3049 );
3050 assert_eq!(
3051 buffer5.read(cx).file().unwrap().path().as_ref(),
3052 Path::new("b/c/file5")
3053 );
3054
3055 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
3056 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
3057 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
3058 assert!(buffer5.read(cx).file().unwrap().is_deleted());
3059 });
3060
3061 // Update the remote worktree. Check that it becomes consistent with the
3062 // local worktree.
3063 cx.executor().run_until_parked();
3064
3065 remote.update(cx, |remote, _| {
3066 for update in updates.lock().drain(..) {
3067 remote.as_remote_mut().unwrap().update_from_remote(update);
3068 }
3069 });
3070 cx.executor().run_until_parked();
3071 remote.update(cx, |remote, _| {
3072 assert_eq!(
3073 remote
3074 .paths()
3075 .map(|p| p.to_str().unwrap())
3076 .collect::<Vec<_>>(),
3077 expected_paths
3078 );
3079 });
3080}
3081
3082#[gpui::test(iterations = 10)]
3083async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3084 init_test(cx);
3085
3086 let fs = FakeFs::new(cx.executor());
3087 fs.insert_tree(
3088 "/dir",
3089 json!({
3090 "a": {
3091 "file1": "",
3092 }
3093 }),
3094 )
3095 .await;
3096
3097 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3098 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3099 let tree_id = tree.update(cx, |tree, _| tree.id());
3100
3101 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3102 project.update(cx, |project, cx| {
3103 let tree = project.worktrees().next().unwrap();
3104 tree.read(cx)
3105 .entry_for_path(path)
3106 .unwrap_or_else(|| panic!("no entry for path {}", path))
3107 .id
3108 })
3109 };
3110
3111 let dir_id = id_for_path("a", cx);
3112 let file_id = id_for_path("a/file1", cx);
3113 let buffer = project
3114 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3115 .await
3116 .unwrap();
3117 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3118
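    // Rename the parent directory. Both the entry ids and the open buffer should
    // survive the rename.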
3119 project
3120 .update(cx, |project, cx| {
3121 project.rename_entry(dir_id, Path::new("b"), cx)
3122 })
3123 .unwrap()
3124 .await
3125 .unwrap();
3126 cx.executor().run_until_parked();
3127
3128 assert_eq!(id_for_path("b", cx), dir_id);
3129 assert_eq!(id_for_path("b/file1", cx), file_id);
3130 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3131}
3132
3133#[gpui::test]
3134async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3135 init_test(cx);
3136
3137 let fs = FakeFs::new(cx.executor());
3138 fs.insert_tree(
3139 "/dir",
3140 json!({
3141 "a.txt": "a-contents",
3142 "b.txt": "b-contents",
3143 }),
3144 )
3145 .await;
3146
3147 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3148
3149 // Spawn multiple tasks to open paths, repeating some paths.
3150 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3151 (
3152 p.open_local_buffer("/dir/a.txt", cx),
3153 p.open_local_buffer("/dir/b.txt", cx),
3154 p.open_local_buffer("/dir/a.txt", cx),
3155 )
3156 });
3157
3158 let buffer_a_1 = buffer_a_1.await.unwrap();
3159 let buffer_a_2 = buffer_a_2.await.unwrap();
3160 let buffer_b = buffer_b.await.unwrap();
3161 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3162 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3163
3164 // There is only one buffer per path.
3165 let buffer_a_id = buffer_a_1.entity_id();
3166 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3167
3168 // Open the same path again while it is still open.
3169 drop(buffer_a_1);
3170 let buffer_a_3 = project
3171 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3172 .await
3173 .unwrap();
3174
3175 // There's still only one buffer per path.
3176 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3177}
3178
3179#[gpui::test]
3180async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3181 init_test(cx);
3182
3183 let fs = FakeFs::new(cx.executor());
3184 fs.insert_tree(
3185 "/dir",
3186 json!({
3187 "file1": "abc",
3188 "file2": "def",
3189 "file3": "ghi",
3190 }),
3191 )
3192 .await;
3193
3194 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3195
3196 let buffer1 = project
3197 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3198 .await
3199 .unwrap();
3200 let events = Arc::new(Mutex::new(Vec::new()));
3201
3202 // initially, the buffer isn't dirty.
3203 buffer1.update(cx, |buffer, cx| {
3204 cx.subscribe(&buffer1, {
3205 let events = events.clone();
3206 move |_, _, event, _| match event {
3207 BufferEvent::Operation(_) => {}
3208 _ => events.lock().push(event.clone()),
3209 }
3210 })
3211 .detach();
3212
3213 assert!(!buffer.is_dirty());
3214 assert!(events.lock().is_empty());
3215
3216 buffer.edit([(1..2, "")], None, cx);
3217 });
3218
    // after the first edit, the buffer is dirty and emits a `DirtyChanged` event.
3220 buffer1.update(cx, |buffer, cx| {
3221 assert!(buffer.text() == "ac");
3222 assert!(buffer.is_dirty());
3223 assert_eq!(
3224 *events.lock(),
3225 &[language::Event::Edited, language::Event::DirtyChanged]
3226 );
3227 events.lock().clear();
3228 buffer.did_save(
3229 buffer.version(),
3230 buffer.as_rope().fingerprint(),
3231 buffer.file().unwrap().mtime(),
3232 cx,
3233 );
3234 });
3235
    // after saving, the buffer is not dirty and emits a `Saved` event.
3237 buffer1.update(cx, |buffer, cx| {
3238 assert!(!buffer.is_dirty());
3239 assert_eq!(*events.lock(), &[language::Event::Saved]);
3240 events.lock().clear();
3241
3242 buffer.edit([(1..1, "B")], None, cx);
3243 buffer.edit([(2..2, "D")], None, cx);
3244 });
3245
    // after editing again, the buffer is dirty and emits another `DirtyChanged` event.
3247 buffer1.update(cx, |buffer, cx| {
3248 assert!(buffer.text() == "aBDc");
3249 assert!(buffer.is_dirty());
3250 assert_eq!(
3251 *events.lock(),
3252 &[
3253 language::Event::Edited,
3254 language::Event::DirtyChanged,
3255 language::Event::Edited,
3256 ],
3257 );
3258 events.lock().clear();
3259
3260 // After restoring the buffer to its previously-saved state,
3261 // the buffer is not considered dirty anymore.
3262 buffer.edit([(1..3, "")], None, cx);
3263 assert!(buffer.text() == "ac");
3264 assert!(!buffer.is_dirty());
3265 });
3266
3267 assert_eq!(
3268 *events.lock(),
3269 &[language::Event::Edited, language::Event::DirtyChanged]
3270 );
3271
3272 // When a file is deleted, the buffer is considered dirty.
3273 let events = Arc::new(Mutex::new(Vec::new()));
3274 let buffer2 = project
3275 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3276 .await
3277 .unwrap();
3278 buffer2.update(cx, |_, cx| {
3279 cx.subscribe(&buffer2, {
3280 let events = events.clone();
3281 move |_, _, event, _| events.lock().push(event.clone())
3282 })
3283 .detach();
3284 });
3285
3286 fs.remove_file("/dir/file2".as_ref(), Default::default())
3287 .await
3288 .unwrap();
3289 cx.executor().run_until_parked();
3290 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3291 assert_eq!(
3292 *events.lock(),
3293 &[
3294 language::Event::DirtyChanged,
3295 language::Event::FileHandleChanged
3296 ]
3297 );
3298
    // When a file that is already dirty is deleted, we don't emit an additional `DirtyChanged` event.
3300 let events = Arc::new(Mutex::new(Vec::new()));
3301 let buffer3 = project
3302 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3303 .await
3304 .unwrap();
3305 buffer3.update(cx, |_, cx| {
3306 cx.subscribe(&buffer3, {
3307 let events = events.clone();
3308 move |_, _, event, _| events.lock().push(event.clone())
3309 })
3310 .detach();
3311 });
3312
3313 buffer3.update(cx, |buffer, cx| {
3314 buffer.edit([(0..0, "x")], None, cx);
3315 });
3316 events.lock().clear();
3317 fs.remove_file("/dir/file3".as_ref(), Default::default())
3318 .await
3319 .unwrap();
3320 cx.executor().run_until_parked();
3321 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3322 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3323}
3324
3325#[gpui::test]
3326async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3327 init_test(cx);
3328
3329 let initial_contents = "aaa\nbbbbb\nc\n";
3330 let fs = FakeFs::new(cx.executor());
3331 fs.insert_tree(
3332 "/dir",
3333 json!({
3334 "the-file": initial_contents,
3335 }),
3336 )
3337 .await;
3338 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3339 let buffer = project
3340 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3341 .await
3342 .unwrap();
3343
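    // Create anchors at column 1 of the first three rows so that we can verify how
    // they move when the file is reloaded from disk.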
3344 let anchors = (0..3)
3345 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3346 .collect::<Vec<_>>();
3347
3348 // Change the file on disk, adding two new lines of text, and removing
3349 // one line.
3350 buffer.update(cx, |buffer, _| {
3351 assert!(!buffer.is_dirty());
3352 assert!(!buffer.has_conflict());
3353 });
3354 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3355 fs.save(
3356 "/dir/the-file".as_ref(),
3357 &new_contents.into(),
3358 LineEnding::Unix,
3359 )
3360 .await
3361 .unwrap();
3362
3363 // Because the buffer was not modified, it is reloaded from disk. Its
3364 // contents are edited according to the diff between the old and new
3365 // file contents.
3366 cx.executor().run_until_parked();
3367 buffer.update(cx, |buffer, _| {
3368 assert_eq!(buffer.text(), new_contents);
3369 assert!(!buffer.is_dirty());
3370 assert!(!buffer.has_conflict());
3371
3372 let anchor_positions = anchors
3373 .iter()
3374 .map(|anchor| anchor.to_point(&*buffer))
3375 .collect::<Vec<_>>();
3376 assert_eq!(
3377 anchor_positions,
3378 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3379 );
3380 });
3381
3382 // Modify the buffer
3383 buffer.update(cx, |buffer, cx| {
3384 buffer.edit([(0..0, " ")], None, cx);
3385 assert!(buffer.is_dirty());
3386 assert!(!buffer.has_conflict());
3387 });
3388
3389 // Change the file on disk again, adding blank lines to the beginning.
3390 fs.save(
3391 "/dir/the-file".as_ref(),
3392 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3393 LineEnding::Unix,
3394 )
3395 .await
3396 .unwrap();
3397
3398 // Because the buffer is modified, it doesn't reload from disk, but is
3399 // marked as having a conflict.
3400 cx.executor().run_until_parked();
3401 buffer.update(cx, |buffer, _| {
3402 assert!(buffer.has_conflict());
3403 });
3404}
3405
3406#[gpui::test]
3407async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3408 init_test(cx);
3409
3410 let fs = FakeFs::new(cx.executor());
3411 fs.insert_tree(
3412 "/dir",
3413 json!({
3414 "file1": "a\nb\nc\n",
3415 "file2": "one\r\ntwo\r\nthree\r\n",
3416 }),
3417 )
3418 .await;
3419
3420 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3421 let buffer1 = project
3422 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3423 .await
3424 .unwrap();
3425 let buffer2 = project
3426 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3427 .await
3428 .unwrap();
3429
3430 buffer1.update(cx, |buffer, _| {
3431 assert_eq!(buffer.text(), "a\nb\nc\n");
3432 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3433 });
3434 buffer2.update(cx, |buffer, _| {
3435 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3436 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3437 });
3438
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3441 fs.save(
3442 "/dir/file1".as_ref(),
3443 &"aaa\nb\nc\n".into(),
3444 LineEnding::Windows,
3445 )
3446 .await
3447 .unwrap();
3448 cx.executor().run_until_parked();
3449 buffer1.update(cx, |buffer, _| {
3450 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3451 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3452 });
3453
    // Save a file with Windows line endings. The file is written correctly.
3455 buffer2.update(cx, |buffer, cx| {
3456 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3457 });
3458 project
3459 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3460 .await
3461 .unwrap();
3462 assert_eq!(
3463 fs.load("/dir/file2".as_ref()).await.unwrap(),
3464 "one\r\ntwo\r\nthree\r\nfour\r\n",
3465 );
3466}
3467
3468#[gpui::test]
3469async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3470 init_test(cx);
3471
3472 let fs = FakeFs::new(cx.executor());
3473 fs.insert_tree(
3474 "/the-dir",
3475 json!({
3476 "a.rs": "
3477 fn foo(mut v: Vec<usize>) {
3478 for x in &v {
3479 v.push(1);
3480 }
3481 }
3482 "
3483 .unindent(),
3484 }),
3485 )
3486 .await;
3487
3488 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3489 let buffer = project
3490 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3491 .await
3492 .unwrap();
3493
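    // Publish diagnostics in which the hint entries reference their primary
    // diagnostics via related information, so that they are grouped together.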
3494 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3495 let message = lsp::PublishDiagnosticsParams {
3496 uri: buffer_uri.clone(),
3497 diagnostics: vec![
3498 lsp::Diagnostic {
3499 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3500 severity: Some(DiagnosticSeverity::WARNING),
3501 message: "error 1".to_string(),
3502 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3503 location: lsp::Location {
3504 uri: buffer_uri.clone(),
3505 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3506 },
3507 message: "error 1 hint 1".to_string(),
3508 }]),
3509 ..Default::default()
3510 },
3511 lsp::Diagnostic {
3512 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3513 severity: Some(DiagnosticSeverity::HINT),
3514 message: "error 1 hint 1".to_string(),
3515 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3516 location: lsp::Location {
3517 uri: buffer_uri.clone(),
3518 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3519 },
3520 message: "original diagnostic".to_string(),
3521 }]),
3522 ..Default::default()
3523 },
3524 lsp::Diagnostic {
3525 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3526 severity: Some(DiagnosticSeverity::ERROR),
3527 message: "error 2".to_string(),
3528 related_information: Some(vec![
3529 lsp::DiagnosticRelatedInformation {
3530 location: lsp::Location {
3531 uri: buffer_uri.clone(),
3532 range: lsp::Range::new(
3533 lsp::Position::new(1, 13),
3534 lsp::Position::new(1, 15),
3535 ),
3536 },
3537 message: "error 2 hint 1".to_string(),
3538 },
3539 lsp::DiagnosticRelatedInformation {
3540 location: lsp::Location {
3541 uri: buffer_uri.clone(),
3542 range: lsp::Range::new(
3543 lsp::Position::new(1, 13),
3544 lsp::Position::new(1, 15),
3545 ),
3546 },
3547 message: "error 2 hint 2".to_string(),
3548 },
3549 ]),
3550 ..Default::default()
3551 },
3552 lsp::Diagnostic {
3553 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3554 severity: Some(DiagnosticSeverity::HINT),
3555 message: "error 2 hint 1".to_string(),
3556 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3557 location: lsp::Location {
3558 uri: buffer_uri.clone(),
3559 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3560 },
3561 message: "original diagnostic".to_string(),
3562 }]),
3563 ..Default::default()
3564 },
3565 lsp::Diagnostic {
3566 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3567 severity: Some(DiagnosticSeverity::HINT),
3568 message: "error 2 hint 2".to_string(),
3569 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3570 location: lsp::Location {
3571 uri: buffer_uri,
3572 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3573 },
3574 message: "original diagnostic".to_string(),
3575 }]),
3576 ..Default::default()
3577 },
3578 ],
3579 version: None,
3580 };
3581
3582 project
3583 .update(cx, |p, cx| {
3584 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3585 })
3586 .unwrap();
3587 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3588
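    // The diagnostics are grouped with their primaries: group 0 is "error 2" and its
    // hints, and group 1 is "error 1" and its hint.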
3589 assert_eq!(
3590 buffer
3591 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3592 .collect::<Vec<_>>(),
3593 &[
3594 DiagnosticEntry {
3595 range: Point::new(1, 8)..Point::new(1, 9),
3596 diagnostic: Diagnostic {
3597 severity: DiagnosticSeverity::WARNING,
3598 message: "error 1".to_string(),
3599 group_id: 1,
3600 is_primary: true,
3601 ..Default::default()
3602 }
3603 },
3604 DiagnosticEntry {
3605 range: Point::new(1, 8)..Point::new(1, 9),
3606 diagnostic: Diagnostic {
3607 severity: DiagnosticSeverity::HINT,
3608 message: "error 1 hint 1".to_string(),
3609 group_id: 1,
3610 is_primary: false,
3611 ..Default::default()
3612 }
3613 },
3614 DiagnosticEntry {
3615 range: Point::new(1, 13)..Point::new(1, 15),
3616 diagnostic: Diagnostic {
3617 severity: DiagnosticSeverity::HINT,
3618 message: "error 2 hint 1".to_string(),
3619 group_id: 0,
3620 is_primary: false,
3621 ..Default::default()
3622 }
3623 },
3624 DiagnosticEntry {
3625 range: Point::new(1, 13)..Point::new(1, 15),
3626 diagnostic: Diagnostic {
3627 severity: DiagnosticSeverity::HINT,
3628 message: "error 2 hint 2".to_string(),
3629 group_id: 0,
3630 is_primary: false,
3631 ..Default::default()
3632 }
3633 },
3634 DiagnosticEntry {
3635 range: Point::new(2, 8)..Point::new(2, 17),
3636 diagnostic: Diagnostic {
3637 severity: DiagnosticSeverity::ERROR,
3638 message: "error 2".to_string(),
3639 group_id: 0,
3640 is_primary: true,
3641 ..Default::default()
3642 }
3643 }
3644 ]
3645 );
3646
3647 assert_eq!(
3648 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3649 &[
3650 DiagnosticEntry {
3651 range: Point::new(1, 13)..Point::new(1, 15),
3652 diagnostic: Diagnostic {
3653 severity: DiagnosticSeverity::HINT,
3654 message: "error 2 hint 1".to_string(),
3655 group_id: 0,
3656 is_primary: false,
3657 ..Default::default()
3658 }
3659 },
3660 DiagnosticEntry {
3661 range: Point::new(1, 13)..Point::new(1, 15),
3662 diagnostic: Diagnostic {
3663 severity: DiagnosticSeverity::HINT,
3664 message: "error 2 hint 2".to_string(),
3665 group_id: 0,
3666 is_primary: false,
3667 ..Default::default()
3668 }
3669 },
3670 DiagnosticEntry {
3671 range: Point::new(2, 8)..Point::new(2, 17),
3672 diagnostic: Diagnostic {
3673 severity: DiagnosticSeverity::ERROR,
3674 message: "error 2".to_string(),
3675 group_id: 0,
3676 is_primary: true,
3677 ..Default::default()
3678 }
3679 }
3680 ]
3681 );
3682
3683 assert_eq!(
3684 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3685 &[
3686 DiagnosticEntry {
3687 range: Point::new(1, 8)..Point::new(1, 9),
3688 diagnostic: Diagnostic {
3689 severity: DiagnosticSeverity::WARNING,
3690 message: "error 1".to_string(),
3691 group_id: 1,
3692 is_primary: true,
3693 ..Default::default()
3694 }
3695 },
3696 DiagnosticEntry {
3697 range: Point::new(1, 8)..Point::new(1, 9),
3698 diagnostic: Diagnostic {
3699 severity: DiagnosticSeverity::HINT,
3700 message: "error 1 hint 1".to_string(),
3701 group_id: 1,
3702 is_primary: false,
3703 ..Default::default()
3704 }
3705 },
3706 ]
3707 );
3708}
3709
3710#[gpui::test]
3711async fn test_rename(cx: &mut gpui::TestAppContext) {
3712 init_test(cx);
3713
3714 let mut language = Language::new(
3715 LanguageConfig {
3716 name: "Rust".into(),
3717 matcher: LanguageMatcher {
3718 path_suffixes: vec!["rs".to_string()],
3719 ..Default::default()
3720 },
3721 ..Default::default()
3722 },
3723 Some(tree_sitter_rust::language()),
3724 );
3725 let mut fake_servers = language
3726 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3727 capabilities: lsp::ServerCapabilities {
3728 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3729 prepare_provider: Some(true),
3730 work_done_progress_options: Default::default(),
3731 })),
3732 ..Default::default()
3733 },
3734 ..Default::default()
3735 }))
3736 .await;
3737
3738 let fs = FakeFs::new(cx.executor());
3739 fs.insert_tree(
3740 "/dir",
3741 json!({
3742 "one.rs": "const ONE: usize = 1;",
3743 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3744 }),
3745 )
3746 .await;
3747
3748 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3749 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3750 let buffer = project
3751 .update(cx, |project, cx| {
3752 project.open_local_buffer("/dir/one.rs", cx)
3753 })
3754 .await
3755 .unwrap();
3756
3757 let fake_server = fake_servers.next().await.unwrap();
3758
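    // Prepare a rename at offset 7, which falls inside `ONE` in one.rs.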
3759 let response = project.update(cx, |project, cx| {
3760 project.prepare_rename(buffer.clone(), 7, cx)
3761 });
3762 fake_server
3763 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3764 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3765 assert_eq!(params.position, lsp::Position::new(0, 7));
3766 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3767 lsp::Position::new(0, 6),
3768 lsp::Position::new(0, 9),
3769 ))))
3770 })
3771 .next()
3772 .await
3773 .unwrap();
3774 let range = response.await.unwrap().unwrap();
3775 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3776 assert_eq!(range, 6..9);
3777
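    // Perform the rename, replacing `ONE` with `THREE` in both one.rs and two.rs.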
3778 let response = project.update(cx, |project, cx| {
3779 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3780 });
3781 fake_server
3782 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3783 assert_eq!(
3784 params.text_document_position.text_document.uri.as_str(),
3785 "file:///dir/one.rs"
3786 );
3787 assert_eq!(
3788 params.text_document_position.position,
3789 lsp::Position::new(0, 7)
3790 );
3791 assert_eq!(params.new_name, "THREE");
3792 Ok(Some(lsp::WorkspaceEdit {
3793 changes: Some(
3794 [
3795 (
3796 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3797 vec![lsp::TextEdit::new(
3798 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3799 "THREE".to_string(),
3800 )],
3801 ),
3802 (
3803 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3804 vec![
3805 lsp::TextEdit::new(
3806 lsp::Range::new(
3807 lsp::Position::new(0, 24),
3808 lsp::Position::new(0, 27),
3809 ),
3810 "THREE".to_string(),
3811 ),
3812 lsp::TextEdit::new(
3813 lsp::Range::new(
3814 lsp::Position::new(0, 35),
3815 lsp::Position::new(0, 38),
3816 ),
3817 "THREE".to_string(),
3818 ),
3819 ],
3820 ),
3821 ]
3822 .into_iter()
3823 .collect(),
3824 ),
3825 ..Default::default()
3826 }))
3827 })
3828 .next()
3829 .await
3830 .unwrap();
3831 let mut transaction = response.await.unwrap().0;
3832 assert_eq!(transaction.len(), 2);
3833 assert_eq!(
3834 transaction
3835 .remove_entry(&buffer)
3836 .unwrap()
3837 .0
3838 .update(cx, |buffer, _| buffer.text()),
3839 "const THREE: usize = 1;"
3840 );
3841 assert_eq!(
3842 transaction
3843 .into_keys()
3844 .next()
3845 .unwrap()
3846 .update(cx, |buffer, _| buffer.text()),
3847 "const TWO: usize = one::THREE + one::THREE;"
3848 );
3849}
3850
3851#[gpui::test]
3852async fn test_search(cx: &mut gpui::TestAppContext) {
3853 init_test(cx);
3854
3855 let fs = FakeFs::new(cx.executor());
3856 fs.insert_tree(
3857 "/dir",
3858 json!({
3859 "one.rs": "const ONE: usize = 1;",
3860 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3861 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3862 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3863 }),
3864 )
3865 .await;
3866 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3867 assert_eq!(
3868 search(
3869 &project,
3870 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3871 cx
3872 )
3873 .await
3874 .unwrap(),
3875 HashMap::from_iter([
3876 ("two.rs".to_string(), vec![6..9]),
3877 ("three.rs".to_string(), vec![37..40])
3878 ])
3879 );
3880
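    // Edit an open buffer so that its unsaved, in-memory contents also match the
    // query. The search should report those matches as well.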
3881 let buffer_4 = project
3882 .update(cx, |project, cx| {
3883 project.open_local_buffer("/dir/four.rs", cx)
3884 })
3885 .await
3886 .unwrap();
3887 buffer_4.update(cx, |buffer, cx| {
3888 let text = "two::TWO";
3889 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3890 });
3891
3892 assert_eq!(
3893 search(
3894 &project,
3895 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3896 cx
3897 )
3898 .await
3899 .unwrap(),
3900 HashMap::from_iter([
3901 ("two.rs".to_string(), vec![6..9]),
3902 ("three.rs".to_string(), vec![37..40]),
3903 ("four.rs".to_string(), vec![25..28, 36..39])
3904 ])
3905 );
3906}
3907
3908#[gpui::test]
3909async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3910 init_test(cx);
3911
3912 let search_query = "file";
3913
3914 let fs = FakeFs::new(cx.executor());
3915 fs.insert_tree(
3916 "/dir",
3917 json!({
3918 "one.rs": r#"// Rust file one"#,
3919 "one.ts": r#"// TypeScript file one"#,
3920 "two.rs": r#"// Rust file two"#,
3921 "two.ts": r#"// TypeScript file two"#,
3922 }),
3923 )
3924 .await;
3925 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3926
3927 assert!(
3928 search(
3929 &project,
3930 SearchQuery::text(
3931 search_query,
3932 false,
3933 true,
3934 false,
3935 vec![PathMatcher::new("*.odd").unwrap()],
3936 Vec::new()
3937 )
3938 .unwrap(),
3939 cx
3940 )
3941 .await
3942 .unwrap()
3943 .is_empty(),
3944 "If no inclusions match, no files should be returned"
3945 );
3946
3947 assert_eq!(
3948 search(
3949 &project,
3950 SearchQuery::text(
3951 search_query,
3952 false,
3953 true,
3954 false,
3955 vec![PathMatcher::new("*.rs").unwrap()],
3956 Vec::new()
3957 )
3958 .unwrap(),
3959 cx
3960 )
3961 .await
3962 .unwrap(),
3963 HashMap::from_iter([
3964 ("one.rs".to_string(), vec![8..12]),
3965 ("two.rs".to_string(), vec![8..12]),
3966 ]),
3967 "Rust only search should give only Rust files"
3968 );
3969
3970 assert_eq!(
3971 search(
3972 &project,
3973 SearchQuery::text(
3974 search_query,
3975 false,
3976 true,
3977 false,
3978 vec![
3979 PathMatcher::new("*.ts").unwrap(),
3980 PathMatcher::new("*.odd").unwrap(),
3981 ],
3982 Vec::new()
3983 ).unwrap(),
3984 cx
3985 )
3986 .await
3987 .unwrap(),
3988 HashMap::from_iter([
3989 ("one.ts".to_string(), vec![14..18]),
3990 ("two.ts".to_string(), vec![14..18]),
3991 ]),
3992 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3993 );
3994
3995 assert_eq!(
3996 search(
3997 &project,
3998 SearchQuery::text(
3999 search_query,
4000 false,
4001 true,
4002 false,
4003 vec![
4004 PathMatcher::new("*.rs").unwrap(),
4005 PathMatcher::new("*.ts").unwrap(),
4006 PathMatcher::new("*.odd").unwrap(),
4007 ],
4008 Vec::new()
4009 ).unwrap(),
4010 cx
4011 )
4012 .await
4013 .unwrap(),
4014 HashMap::from_iter([
4015 ("one.rs".to_string(), vec![8..12]),
4016 ("one.ts".to_string(), vec![14..18]),
4017 ("two.rs".to_string(), vec![8..12]),
4018 ("two.ts".to_string(), vec![14..18]),
4019 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4021 );
4022}
4023
4024#[gpui::test]
4025async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4026 init_test(cx);
4027
4028 let search_query = "file";
4029
4030 let fs = FakeFs::new(cx.executor());
4031 fs.insert_tree(
4032 "/dir",
4033 json!({
4034 "one.rs": r#"// Rust file one"#,
4035 "one.ts": r#"// TypeScript file one"#,
4036 "two.rs": r#"// Rust file two"#,
4037 "two.ts": r#"// TypeScript file two"#,
4038 }),
4039 )
4040 .await;
4041 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4042
4043 assert_eq!(
4044 search(
4045 &project,
4046 SearchQuery::text(
4047 search_query,
4048 false,
4049 true,
4050 false,
4051 Vec::new(),
4052 vec![PathMatcher::new("*.odd").unwrap()],
4053 )
4054 .unwrap(),
4055 cx
4056 )
4057 .await
4058 .unwrap(),
4059 HashMap::from_iter([
4060 ("one.rs".to_string(), vec![8..12]),
4061 ("one.ts".to_string(), vec![14..18]),
4062 ("two.rs".to_string(), vec![8..12]),
4063 ("two.ts".to_string(), vec![14..18]),
4064 ]),
4065 "If no exclusions match, all files should be returned"
4066 );
4067
4068 assert_eq!(
4069 search(
4070 &project,
4071 SearchQuery::text(
4072 search_query,
4073 false,
4074 true,
4075 false,
4076 Vec::new(),
4077 vec![PathMatcher::new("*.rs").unwrap()],
4078 )
4079 .unwrap(),
4080 cx
4081 )
4082 .await
4083 .unwrap(),
4084 HashMap::from_iter([
4085 ("one.ts".to_string(), vec![14..18]),
4086 ("two.ts".to_string(), vec![14..18]),
4087 ]),
4088 "Rust exclusion search should give only TypeScript files"
4089 );
4090
4091 assert_eq!(
4092 search(
4093 &project,
4094 SearchQuery::text(
4095 search_query,
4096 false,
4097 true,
4098 false,
4099 Vec::new(),
4100 vec![
4101 PathMatcher::new("*.ts").unwrap(),
4102 PathMatcher::new("*.odd").unwrap(),
4103 ],
4104 ).unwrap(),
4105 cx
4106 )
4107 .await
4108 .unwrap(),
4109 HashMap::from_iter([
4110 ("one.rs".to_string(), vec![8..12]),
4111 ("two.rs".to_string(), vec![8..12]),
4112 ]),
4113 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4114 );
4115
4116 assert!(
4117 search(
4118 &project,
4119 SearchQuery::text(
4120 search_query,
4121 false,
4122 true,
4123 false,
4124 Vec::new(),
4125 vec![
4126 PathMatcher::new("*.rs").unwrap(),
4127 PathMatcher::new("*.ts").unwrap(),
4128 PathMatcher::new("*.odd").unwrap(),
4129 ],
4130 ).unwrap(),
4131 cx
4132 )
4133 .await
4134 .unwrap().is_empty(),
4135 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4136 );
4137}
4138
4139#[gpui::test]
4140async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4141 init_test(cx);
4142
4143 let search_query = "file";
4144
4145 let fs = FakeFs::new(cx.executor());
4146 fs.insert_tree(
4147 "/dir",
4148 json!({
4149 "one.rs": r#"// Rust file one"#,
4150 "one.ts": r#"// TypeScript file one"#,
4151 "two.rs": r#"// Rust file two"#,
4152 "two.ts": r#"// TypeScript file two"#,
4153 }),
4154 )
4155 .await;
4156 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4157
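    // When a file matches both an inclusion and an exclusion, the exclusion
    // wins; the cases below mix matching and non-matching patterns on both
    // sides.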
4158 assert!(
4159 search(
4160 &project,
4161 SearchQuery::text(
4162 search_query,
4163 false,
4164 true,
4165 false,
4166 vec![PathMatcher::new("*.odd").unwrap()],
4167 vec![PathMatcher::new("*.odd").unwrap()],
4168 )
4169 .unwrap(),
4170 cx
4171 )
4172 .await
4173 .unwrap()
4174 .is_empty(),
4175 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4176 );
4177
4178 assert!(
4179 search(
4180 &project,
4181 SearchQuery::text(
4182 search_query,
4183 false,
4184 true,
4185 false,
4186 vec![PathMatcher::new("*.ts").unwrap()],
4187 vec![PathMatcher::new("*.ts").unwrap()],
4188 ).unwrap(),
4189 cx
4190 )
4191 .await
4192 .unwrap()
4193 .is_empty(),
4194 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4195 );
4196
4197 assert!(
4198 search(
4199 &project,
4200 SearchQuery::text(
4201 search_query,
4202 false,
4203 true,
4204 false,
4205 vec![
4206 PathMatcher::new("*.ts").unwrap(),
4207 PathMatcher::new("*.odd").unwrap()
4208 ],
4209 vec![
4210 PathMatcher::new("*.ts").unwrap(),
4211 PathMatcher::new("*.odd").unwrap()
4212 ],
4213 )
4214 .unwrap(),
4215 cx
4216 )
4217 .await
4218 .unwrap()
4219 .is_empty(),
4220 "Non-matching inclusions and exclusions should not change that."
4221 );
4222
4223 assert_eq!(
4224 search(
4225 &project,
4226 SearchQuery::text(
4227 search_query,
4228 false,
4229 true,
4230 false,
4231 vec![
4232 PathMatcher::new("*.ts").unwrap(),
4233 PathMatcher::new("*.odd").unwrap()
4234 ],
4235 vec![
4236 PathMatcher::new("*.rs").unwrap(),
4237 PathMatcher::new("*.odd").unwrap()
4238 ],
4239 )
4240 .unwrap(),
4241 cx
4242 )
4243 .await
4244 .unwrap(),
4245 HashMap::from_iter([
4246 ("one.ts".to_string(), vec![14..18]),
4247 ("two.ts".to_string(), vec![14..18]),
4248 ]),
4249 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4250 );
4251}
4252
4253#[gpui::test]
4254async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4255 init_test(cx);
4256
4257 let fs = FakeFs::new(cx.background_executor.clone());
4258 fs.insert_tree(
4259 "/dir",
4260 json!({
4261 ".git": {},
4262 ".gitignore": "**/target\n/node_modules\n",
4263 "target": {
4264 "index.txt": "index_key:index_value"
4265 },
4266 "node_modules": {
4267 "eslint": {
4268 "index.ts": "const eslint_key = 'eslint value'",
4269 "package.json": r#"{ "some_key": "some value" }"#,
4270 },
4271 "prettier": {
4272 "index.ts": "const prettier_key = 'prettier value'",
4273 "package.json": r#"{ "other_key": "other value" }"#,
4274 },
4275 },
4276 "package.json": r#"{ "main_key": "main value" }"#,
4277 }),
4278 )
4279 .await;
4280 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4281
4282 let query = "key";
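    // The first query leaves gitignored files out of the search; the later
    // ones opt in via the fourth flag (assumed to be include_ignored).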
4283 assert_eq!(
4284 search(
4285 &project,
4286 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4287 cx
4288 )
4289 .await
4290 .unwrap(),
4291 HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
4292 "Only one non-ignored file should have the query"
4293 );
4294
4295 assert_eq!(
4296 search(
4297 &project,
4298 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4299 cx
4300 )
4301 .await
4302 .unwrap(),
4303 HashMap::from_iter([
4304 ("package.json".to_string(), vec![8..11]),
4305 ("target/index.txt".to_string(), vec![6..9]),
4306 (
4307 "node_modules/prettier/package.json".to_string(),
4308 vec![9..12]
4309 ),
4310 ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
4311 ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
4312 ("node_modules/eslint/package.json".to_string(), vec![8..11]),
4313 ]),
4314 "Unrestricted search with ignored directories should find every file with the query"
4315 );
4316
4317 assert_eq!(
4318 search(
4319 &project,
4320 SearchQuery::text(
4321 query,
4322 false,
4323 false,
4324 true,
4325 vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
4326 vec![PathMatcher::new("*.ts").unwrap()],
4327 )
4328 .unwrap(),
4329 cx
4330 )
4331 .await
4332 .unwrap(),
4333 HashMap::from_iter([(
4334 "node_modules/prettier/package.json".to_string(),
4335 vec![9..12]
4336 )]),
4337 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4338 );
4339}
4340
4341#[test]
4342fn test_glob_literal_prefix() {
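    // The literal prefix is the leading part of the pattern that contains no
    // glob metacharacters; a pattern containing none is returned unchanged.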
4343 assert_eq!(glob_literal_prefix("**/*.js"), "");
4344 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4345 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4346 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4347}
4348
4349#[gpui::test]
4350async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4351 init_test(cx);
4352
4353 let fs = FakeFs::new(cx.executor().clone());
4354 fs.insert_tree(
4355 "/one/two",
4356 json!({
4357 "three": {
4358 "a.txt": "",
4359 "four": {}
4360 },
4361 "c.rs": ""
4362 }),
4363 )
4364 .await;
4365
4366 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
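    // "b.." is an ordinary file name, not a parent-directory traversal, so
    // creating it should succeed.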
4367 project
4368 .update(cx, |project, cx| {
4369 let id = project.worktrees().next().unwrap().read(cx).id();
4370 project.create_entry((id, "b.."), true, cx)
4371 })
4372 .unwrap()
4373 .await
4374 .unwrap();
4375
4376 // Can't create paths outside the project
4377 let result = project
4378 .update(cx, |project, cx| {
4379 let id = project.worktrees().next().unwrap().read(cx).id();
4380 project.create_entry((id, "../../boop"), true, cx)
4381 })
4382 .await;
4383 assert!(result.is_err());
4384
4385 // Can't create paths with '..'
4386 let result = project
4387 .update(cx, |project, cx| {
4388 let id = project.worktrees().next().unwrap().read(cx).id();
4389 project.create_entry((id, "four/../beep"), true, cx)
4390 })
4391 .await;
4392 assert!(result.is_err());
4393
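    // Only the successfully created "b.." entry should have been written to
    // disk; the rejected paths must not appear.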
4394 assert_eq!(
4395 fs.paths(true),
4396 vec![
4397 PathBuf::from("/"),
4398 PathBuf::from("/one"),
4399 PathBuf::from("/one/two"),
4400 PathBuf::from("/one/two/c.rs"),
4401 PathBuf::from("/one/two/three"),
4402 PathBuf::from("/one/two/three/a.txt"),
4403 PathBuf::from("/one/two/three/b.."),
4404 PathBuf::from("/one/two/three/four"),
4405 ]
4406 );
4407
4408 // And we cannot open buffers with '..'
4409 let result = project
4410 .update(cx, |project, cx| {
4411 let id = project.worktrees().next().unwrap().read(cx).id();
4412 project.open_buffer((id, "../c.rs"), cx)
4413 })
4414 .await;
4415 assert!(result.is_err())
4416}
4417
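/// Runs `query` against `project` and returns the matches as a map from
/// worktree-relative file path to the offset ranges of each match.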
4418async fn search(
4419 project: &Model<Project>,
4420 query: SearchQuery,
4421 cx: &mut gpui::TestAppContext,
4422) -> Result<HashMap<String, Vec<Range<usize>>>> {
4423 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4424 let mut result = HashMap::default();
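    // Collect the ranges reported for each buffer; or_insert keeps the first
    // set if a buffer were ever reported more than once.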
    while let Some((buffer, ranges)) = search_rx.next().await {
        result.entry(buffer).or_insert(ranges);
4427 }
4428 Ok(result
4429 .into_iter()
4430 .map(|(buffer, ranges)| {
4431 buffer.update(cx, |buffer, _| {
4432 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4433 let ranges = ranges
4434 .into_iter()
4435 .map(|range| range.to_offset(buffer))
4436 .collect::<Vec<_>>();
4437 (path, ranges)
4438 })
4439 })
4440 .collect())
4441}
4442
4443fn init_test(cx: &mut gpui::TestAppContext) {
4444 if std::env::var("RUST_LOG").is_ok() {
4445 env_logger::try_init().ok();
4446 }
4447
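    // Register the global settings store plus the release-channel, language,
    // and project settings that these tests depend on.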
4448 cx.update(|cx| {
4449 let settings_store = SettingsStore::test(cx);
4450 cx.set_global(settings_store);
4451 release_channel::init("0.0.0", cx);
4452 language::init(cx);
4453 Project::init_settings(cx);
4454 });
4455}