1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
use std::{mem, os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
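// With parking allowed, the test executor should be able to await a channel that is
// fed from a separate OS thread performing blocking filesystem work.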
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
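// Same idea as above, but the blocking work runs on smol's blocking thread pool via
// `smol::unblock` and is awaited from a task spawned on the foreground executor.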
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
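// A worktree opened through a symlinked root should resolve entries through the link:
// the directory symlink inside the tree ("finnochio" -> "fennel") yields the same
// inodes as its target directory.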
48#[gpui::test]
49async fn test_symlinks(cx: &mut gpui::TestAppContext) {
50 init_test(cx);
51 cx.executor().allow_parking();
52
53 let dir = temp_tree(json!({
54 "root": {
55 "apple": "",
56 "banana": {
57 "carrot": {
58 "date": "",
59 "endive": "",
60 }
61 },
62 "fennel": {
63 "grape": "",
64 }
65 }
66 }));
67
68 let root_link_path = dir.path().join("root_link");
69 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
70 os::unix::fs::symlink(
71 &dir.path().join("root/fennel"),
72 &dir.path().join("root/finnochio"),
73 )
74 .unwrap();
75
76 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
77
78 project.update(cx, |project, cx| {
79 let tree = project.worktrees().next().unwrap().read(cx);
80 assert_eq!(tree.file_count(), 5);
81 assert_eq!(
82 tree.inode_for_path("fennel/grape"),
83 tree.inode_for_path("finnochio/grape")
84 );
85 });
86}
87
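// Per-directory `.zed/settings.json` files should override the root settings for files
// beneath them (tab_size 8 at the root, tab_size 2 inside "b/").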
88#[gpui::test]
89async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
90 init_test(cx);
91
92 let fs = FakeFs::new(cx.executor());
93 fs.insert_tree(
94 "/the-root",
95 json!({
96 ".zed": {
97 "settings.json": r#"{ "tab_size": 8 }"#
98 },
99 "a": {
100 "a.rs": "fn a() {\n A\n}"
101 },
102 "b": {
103 ".zed": {
104 "settings.json": r#"{ "tab_size": 2 }"#
105 },
106 "b.rs": "fn b() {\n B\n}"
107 }
108 }),
109 )
110 .await;
111
112 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
113 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
114
115 cx.executor().run_until_parked();
116 cx.update(|cx| {
117 let tree = worktree.read(cx);
118
119 let settings_a = language_settings(
120 None,
121 Some(
122 &(File::for_entry(
123 tree.entry_for_path("a/a.rs").unwrap().clone(),
124 worktree.clone(),
125 ) as _),
126 ),
127 cx,
128 );
129 let settings_b = language_settings(
130 None,
131 Some(
132 &(File::for_entry(
133 tree.entry_for_path("b/b.rs").unwrap().clone(),
134 worktree.clone(),
135 ) as _),
136 ),
137 cx,
138 );
139
140 assert_eq!(settings_a.tab_size.get(), 8);
141 assert_eq!(settings_b.tab_size.get(), 2);
142 });
143}
144
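// End-to-end check of language server lifecycle management: servers start when a
// matching buffer is opened, receive open/change/save/close notifications only for
// buffers in their language, follow files across renames (including renames that
// change the file's language), and are restarted on request.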
145#[gpui::test]
146async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
147 init_test(cx);
148
149 let mut rust_language = Language::new(
150 LanguageConfig {
151 name: "Rust".into(),
152 matcher: LanguageMatcher {
153 path_suffixes: vec!["rs".to_string()],
154 ..Default::default()
155 },
156 ..Default::default()
157 },
158 Some(tree_sitter_rust::language()),
159 );
160 let mut json_language = Language::new(
161 LanguageConfig {
162 name: "JSON".into(),
163 matcher: LanguageMatcher {
164 path_suffixes: vec!["json".to_string()],
165 ..Default::default()
166 },
167 ..Default::default()
168 },
169 None,
170 );
171 let mut fake_rust_servers = rust_language
172 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
173 name: "the-rust-language-server",
174 capabilities: lsp::ServerCapabilities {
175 completion_provider: Some(lsp::CompletionOptions {
176 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
177 ..Default::default()
178 }),
179 ..Default::default()
180 },
181 ..Default::default()
182 }))
183 .await;
184 let mut fake_json_servers = json_language
185 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
186 name: "the-json-language-server",
187 capabilities: lsp::ServerCapabilities {
188 completion_provider: Some(lsp::CompletionOptions {
189 trigger_characters: Some(vec![":".to_string()]),
190 ..Default::default()
191 }),
192 ..Default::default()
193 },
194 ..Default::default()
195 }))
196 .await;
197
198 let fs = FakeFs::new(cx.executor());
199 fs.insert_tree(
200 "/the-root",
201 json!({
202 "test.rs": "const A: i32 = 1;",
203 "test2.rs": "",
204 "Cargo.toml": "a = 1",
205 "package.json": "{\"a\": 1}",
206 }),
207 )
208 .await;
209
210 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
211
212 // Open a buffer without an associated language server.
213 let toml_buffer = project
214 .update(cx, |project, cx| {
215 project.open_local_buffer("/the-root/Cargo.toml", cx)
216 })
217 .await
218 .unwrap();
219
220 // Open a buffer with an associated language server before the language for it has been loaded.
221 let rust_buffer = project
222 .update(cx, |project, cx| {
223 project.open_local_buffer("/the-root/test.rs", cx)
224 })
225 .await
226 .unwrap();
227 rust_buffer.update(cx, |buffer, _| {
228 assert_eq!(buffer.language().map(|l| l.name()), None);
229 });
230
231 // Now we add the languages to the project, and ensure they get assigned to all
232 // the relevant open buffers.
233 project.update(cx, |project, _| {
234 project.languages.add(Arc::new(json_language));
235 project.languages.add(Arc::new(rust_language));
236 });
237 cx.executor().run_until_parked();
238 rust_buffer.update(cx, |buffer, _| {
239 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
240 });
241
242 // A server is started up, and it is notified about Rust files.
243 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
244 assert_eq!(
245 fake_rust_server
246 .receive_notification::<lsp::notification::DidOpenTextDocument>()
247 .await
248 .text_document,
249 lsp::TextDocumentItem {
250 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
251 version: 0,
252 text: "const A: i32 = 1;".to_string(),
253 language_id: Default::default()
254 }
255 );
256
257 // The buffer is configured based on the language server's capabilities.
258 rust_buffer.update(cx, |buffer, _| {
259 assert_eq!(
260 buffer.completion_triggers(),
261 &[".".to_string(), "::".to_string()]
262 );
263 });
264 toml_buffer.update(cx, |buffer, _| {
265 assert!(buffer.completion_triggers().is_empty());
266 });
267
268 // Edit a buffer. The changes are reported to the language server.
269 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
270 assert_eq!(
271 fake_rust_server
272 .receive_notification::<lsp::notification::DidChangeTextDocument>()
273 .await
274 .text_document,
275 lsp::VersionedTextDocumentIdentifier::new(
276 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
277 1
278 )
279 );
280
281 // Open a third buffer with a different associated language server.
282 let json_buffer = project
283 .update(cx, |project, cx| {
284 project.open_local_buffer("/the-root/package.json", cx)
285 })
286 .await
287 .unwrap();
288
289 // A json language server is started up and is only notified about the json buffer.
290 let mut fake_json_server = fake_json_servers.next().await.unwrap();
291 assert_eq!(
292 fake_json_server
293 .receive_notification::<lsp::notification::DidOpenTextDocument>()
294 .await
295 .text_document,
296 lsp::TextDocumentItem {
297 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
298 version: 0,
299 text: "{\"a\": 1}".to_string(),
300 language_id: Default::default()
301 }
302 );
303
304 // This buffer is configured based on the second language server's
305 // capabilities.
306 json_buffer.update(cx, |buffer, _| {
307 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
308 });
309
310 // When opening another buffer whose language server is already running,
311 // it is also configured based on the existing language server's capabilities.
312 let rust_buffer2 = project
313 .update(cx, |project, cx| {
314 project.open_local_buffer("/the-root/test2.rs", cx)
315 })
316 .await
317 .unwrap();
318 rust_buffer2.update(cx, |buffer, _| {
319 assert_eq!(
320 buffer.completion_triggers(),
321 &[".".to_string(), "::".to_string()]
322 );
323 });
324
325 // Changes are reported only to servers matching the buffer's language.
326 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
327 rust_buffer2.update(cx, |buffer, cx| {
328 buffer.edit([(0..0, "let x = 1;")], None, cx)
329 });
330 assert_eq!(
331 fake_rust_server
332 .receive_notification::<lsp::notification::DidChangeTextDocument>()
333 .await
334 .text_document,
335 lsp::VersionedTextDocumentIdentifier::new(
336 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
337 1
338 )
339 );
340
341 // Save notifications are reported to all servers.
342 project
343 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
344 .await
345 .unwrap();
346 assert_eq!(
347 fake_rust_server
348 .receive_notification::<lsp::notification::DidSaveTextDocument>()
349 .await
350 .text_document,
351 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
352 );
353 assert_eq!(
354 fake_json_server
355 .receive_notification::<lsp::notification::DidSaveTextDocument>()
356 .await
357 .text_document,
358 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
359 );
360
361 // Renames are reported only to servers matching the buffer's language.
362 fs.rename(
363 Path::new("/the-root/test2.rs"),
364 Path::new("/the-root/test3.rs"),
365 Default::default(),
366 )
367 .await
368 .unwrap();
369 assert_eq!(
370 fake_rust_server
371 .receive_notification::<lsp::notification::DidCloseTextDocument>()
372 .await
373 .text_document,
374 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
375 );
376 assert_eq!(
377 fake_rust_server
378 .receive_notification::<lsp::notification::DidOpenTextDocument>()
379 .await
380 .text_document,
381 lsp::TextDocumentItem {
382 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
383 version: 0,
384 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
385 language_id: Default::default()
386 },
387 );
388
389 rust_buffer2.update(cx, |buffer, cx| {
390 buffer.update_diagnostics(
391 LanguageServerId(0),
392 DiagnosticSet::from_sorted_entries(
393 vec![DiagnosticEntry {
394 diagnostic: Default::default(),
395 range: Anchor::MIN..Anchor::MAX,
396 }],
397 &buffer.snapshot(),
398 ),
399 cx,
400 );
401 assert_eq!(
402 buffer
403 .snapshot()
404 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
405 .count(),
406 1
407 );
408 });
409
410 // When the rename changes the extension of the file, the buffer gets closed on the old
411 // language server and gets opened on the new one.
412 fs.rename(
413 Path::new("/the-root/test3.rs"),
414 Path::new("/the-root/test3.json"),
415 Default::default(),
416 )
417 .await
418 .unwrap();
419 assert_eq!(
420 fake_rust_server
421 .receive_notification::<lsp::notification::DidCloseTextDocument>()
422 .await
423 .text_document,
424 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
425 );
426 assert_eq!(
427 fake_json_server
428 .receive_notification::<lsp::notification::DidOpenTextDocument>()
429 .await
430 .text_document,
431 lsp::TextDocumentItem {
432 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
433 version: 0,
434 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
435 language_id: Default::default()
436 },
437 );
438
439 // We clear the diagnostics, since the language has changed.
440 rust_buffer2.update(cx, |buffer, _| {
441 assert_eq!(
442 buffer
443 .snapshot()
444 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
445 .count(),
446 0
447 );
448 });
449
450 // The renamed file's version resets after changing language server.
451 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
452 assert_eq!(
453 fake_json_server
454 .receive_notification::<lsp::notification::DidChangeTextDocument>()
455 .await
456 .text_document,
457 lsp::VersionedTextDocumentIdentifier::new(
458 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
459 1
460 )
461 );
462
463 // Restart language servers
464 project.update(cx, |project, cx| {
465 project.restart_language_servers_for_buffers(
466 vec![rust_buffer.clone(), json_buffer.clone()],
467 cx,
468 );
469 });
470
471 let mut rust_shutdown_requests = fake_rust_server
472 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
473 let mut json_shutdown_requests = fake_json_server
474 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
475 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
476
477 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
478 let mut fake_json_server = fake_json_servers.next().await.unwrap();
479
480 // Ensure rust document is reopened in new rust language server
481 assert_eq!(
482 fake_rust_server
483 .receive_notification::<lsp::notification::DidOpenTextDocument>()
484 .await
485 .text_document,
486 lsp::TextDocumentItem {
487 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
488 version: 0,
489 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
490 language_id: Default::default()
491 }
492 );
493
494 // Ensure json documents are reopened in new json language server
495 assert_set_eq!(
496 [
497 fake_json_server
498 .receive_notification::<lsp::notification::DidOpenTextDocument>()
499 .await
500 .text_document,
501 fake_json_server
502 .receive_notification::<lsp::notification::DidOpenTextDocument>()
503 .await
504 .text_document,
505 ],
506 [
507 lsp::TextDocumentItem {
508 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
509 version: 0,
510 text: json_buffer.update(cx, |buffer, _| buffer.text()),
511 language_id: Default::default()
512 },
513 lsp::TextDocumentItem {
514 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
515 version: 0,
516 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
517 language_id: Default::default()
518 }
519 ]
520 );
521
522 // Close notifications are reported only to servers matching the buffer's language.
523 cx.update(|_| drop(json_buffer));
524 let close_message = lsp::DidCloseTextDocumentParams {
525 text_document: lsp::TextDocumentIdentifier::new(
526 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
527 ),
528 };
529 assert_eq!(
530 fake_json_server
531 .receive_notification::<lsp::notification::DidCloseTextDocument>()
532 .await,
533 close_message,
534 );
535}
536
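// When a language server registers `workspace/didChangeWatchedFiles`, the project
// should load ignored directories covered by the watch patterns and forward only the
// matching filesystem events to the server.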
537#[gpui::test]
538async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
539 init_test(cx);
540
541 let mut language = Language::new(
542 LanguageConfig {
543 name: "Rust".into(),
544 matcher: LanguageMatcher {
545 path_suffixes: vec!["rs".to_string()],
546 ..Default::default()
547 },
548 ..Default::default()
549 },
550 Some(tree_sitter_rust::language()),
551 );
552 let mut fake_servers = language
553 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
554 name: "the-language-server",
555 ..Default::default()
556 }))
557 .await;
558
559 let fs = FakeFs::new(cx.executor());
560 fs.insert_tree(
561 "/the-root",
562 json!({
563 ".gitignore": "target\n",
564 "src": {
565 "a.rs": "",
566 "b.rs": "",
567 },
568 "target": {
569 "x": {
570 "out": {
571 "x.rs": ""
572 }
573 },
574 "y": {
575 "out": {
576 "y.rs": "",
577 }
578 },
579 "z": {
580 "out": {
581 "z.rs": ""
582 }
583 }
584 }
585 }),
586 )
587 .await;
588
589 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
590 project.update(cx, |project, _| {
591 project.languages.add(Arc::new(language));
592 });
593 cx.executor().run_until_parked();
594
595 // Start the language server by opening a buffer with a compatible file extension.
596 let _buffer = project
597 .update(cx, |project, cx| {
598 project.open_local_buffer("/the-root/src/a.rs", cx)
599 })
600 .await
601 .unwrap();
602
603 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
604 project.update(cx, |project, cx| {
605 let worktree = project.worktrees().next().unwrap();
606 assert_eq!(
607 worktree
608 .read(cx)
609 .snapshot()
610 .entries(true)
611 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
612 .collect::<Vec<_>>(),
613 &[
614 (Path::new(""), false),
615 (Path::new(".gitignore"), false),
616 (Path::new("src"), false),
617 (Path::new("src/a.rs"), false),
618 (Path::new("src/b.rs"), false),
619 (Path::new("target"), true),
620 ]
621 );
622 });
623
624 let prev_read_dir_count = fs.read_dir_call_count();
625
626 // Keep track of the FS events reported to the language server.
627 let fake_server = fake_servers.next().await.unwrap();
628 let file_changes = Arc::new(Mutex::new(Vec::new()));
629 fake_server
630 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
631 registrations: vec![lsp::Registration {
632 id: Default::default(),
633 method: "workspace/didChangeWatchedFiles".to_string(),
634 register_options: serde_json::to_value(
635 lsp::DidChangeWatchedFilesRegistrationOptions {
636 watchers: vec![
637 lsp::FileSystemWatcher {
638 glob_pattern: lsp::GlobPattern::String(
639 "/the-root/Cargo.toml".to_string(),
640 ),
641 kind: None,
642 },
643 lsp::FileSystemWatcher {
644 glob_pattern: lsp::GlobPattern::String(
645 "/the-root/src/*.{rs,c}".to_string(),
646 ),
647 kind: None,
648 },
649 lsp::FileSystemWatcher {
650 glob_pattern: lsp::GlobPattern::String(
651 "/the-root/target/y/**/*.rs".to_string(),
652 ),
653 kind: None,
654 },
655 ],
656 },
657 )
658 .ok(),
659 }],
660 })
661 .await
662 .unwrap();
663 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
664 let file_changes = file_changes.clone();
665 move |params, _| {
666 let mut file_changes = file_changes.lock();
667 file_changes.extend(params.changes);
668 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
669 }
670 });
671
672 cx.executor().run_until_parked();
673 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
674 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
675
676 // Now the language server has asked us to watch an ignored directory path,
677 // so we recursively load it.
678 project.update(cx, |project, cx| {
679 let worktree = project.worktrees().next().unwrap();
680 assert_eq!(
681 worktree
682 .read(cx)
683 .snapshot()
684 .entries(true)
685 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
686 .collect::<Vec<_>>(),
687 &[
688 (Path::new(""), false),
689 (Path::new(".gitignore"), false),
690 (Path::new("src"), false),
691 (Path::new("src/a.rs"), false),
692 (Path::new("src/b.rs"), false),
693 (Path::new("target"), true),
694 (Path::new("target/x"), true),
695 (Path::new("target/y"), true),
696 (Path::new("target/y/out"), true),
697 (Path::new("target/y/out/y.rs"), true),
698 (Path::new("target/z"), true),
699 ]
700 );
701 });
702
    // Perform some file system mutations, three of which match the watched patterns,
    // and two of which do not.
705 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
706 .await
707 .unwrap();
708 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
709 .await
710 .unwrap();
711 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
712 .await
713 .unwrap();
714 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
715 .await
716 .unwrap();
717 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
718 .await
719 .unwrap();
720
721 // The language server receives events for the FS mutations that match its watch patterns.
722 cx.executor().run_until_parked();
723 assert_eq!(
724 &*file_changes.lock(),
725 &[
726 lsp::FileEvent {
727 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
728 typ: lsp::FileChangeType::DELETED,
729 },
730 lsp::FileEvent {
731 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
732 typ: lsp::FileChangeType::CREATED,
733 },
734 lsp::FileEvent {
735 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
736 typ: lsp::FileChangeType::CREATED,
737 },
738 ]
739 );
740}
741
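// Diagnostics published for single-file worktrees should be routed to the
// corresponding buffers.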
742#[gpui::test]
743async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
744 init_test(cx);
745
746 let fs = FakeFs::new(cx.executor());
747 fs.insert_tree(
748 "/dir",
749 json!({
750 "a.rs": "let a = 1;",
751 "b.rs": "let b = 2;"
752 }),
753 )
754 .await;
755
756 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
757
758 let buffer_a = project
759 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
760 .await
761 .unwrap();
762 let buffer_b = project
763 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
764 .await
765 .unwrap();
766
767 project.update(cx, |project, cx| {
768 project
769 .update_diagnostics(
770 LanguageServerId(0),
771 lsp::PublishDiagnosticsParams {
772 uri: Url::from_file_path("/dir/a.rs").unwrap(),
773 version: None,
774 diagnostics: vec![lsp::Diagnostic {
775 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
776 severity: Some(lsp::DiagnosticSeverity::ERROR),
777 message: "error 1".to_string(),
778 ..Default::default()
779 }],
780 },
781 &[],
782 cx,
783 )
784 .unwrap();
785 project
786 .update_diagnostics(
787 LanguageServerId(0),
788 lsp::PublishDiagnosticsParams {
789 uri: Url::from_file_path("/dir/b.rs").unwrap(),
790 version: None,
791 diagnostics: vec![lsp::Diagnostic {
792 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
793 severity: Some(lsp::DiagnosticSeverity::WARNING),
794 message: "error 2".to_string(),
795 ..Default::default()
796 }],
797 },
798 &[],
799 cx,
800 )
801 .unwrap();
802 });
803
804 buffer_a.update(cx, |buffer, _| {
805 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
806 assert_eq!(
807 chunks
808 .iter()
809 .map(|(s, d)| (s.as_str(), *d))
810 .collect::<Vec<_>>(),
811 &[
812 ("let ", None),
813 ("a", Some(DiagnosticSeverity::ERROR)),
814 (" = 1;", None),
815 ]
816 );
817 });
818 buffer_b.update(cx, |buffer, _| {
819 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
820 assert_eq!(
821 chunks
822 .iter()
823 .map(|(s, d)| (s.as_str(), *d))
824 .collect::<Vec<_>>(),
825 &[
826 ("let ", None),
827 ("b", Some(DiagnosticSeverity::WARNING)),
828 (" = 2;", None),
829 ]
830 );
831 });
832}
833
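// Diagnostics in gitignored files and in non-visible worktrees should still appear
// inside their buffers, but should be omitted from the project-wide summaries unless
// explicitly requested.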
834#[gpui::test]
835async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
836 init_test(cx);
837
838 let fs = FakeFs::new(cx.executor());
839 fs.insert_tree(
840 "/root",
841 json!({
842 "dir": {
843 ".git": {
844 "HEAD": "ref: refs/heads/main",
845 },
846 ".gitignore": "b.rs",
847 "a.rs": "let a = 1;",
848 "b.rs": "let b = 2;",
849 },
850 "other.rs": "let b = c;"
851 }),
852 )
853 .await;
854
855 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
856 let (worktree, _) = project
857 .update(cx, |project, cx| {
858 project.find_or_create_local_worktree("/root/dir", true, cx)
859 })
860 .await
861 .unwrap();
862 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
863
864 let (worktree, _) = project
865 .update(cx, |project, cx| {
866 project.find_or_create_local_worktree("/root/other.rs", false, cx)
867 })
868 .await
869 .unwrap();
870 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
871
872 let server_id = LanguageServerId(0);
873 project.update(cx, |project, cx| {
874 project
875 .update_diagnostics(
876 server_id,
877 lsp::PublishDiagnosticsParams {
878 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
879 version: None,
880 diagnostics: vec![lsp::Diagnostic {
881 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
882 severity: Some(lsp::DiagnosticSeverity::ERROR),
883 message: "unused variable 'b'".to_string(),
884 ..Default::default()
885 }],
886 },
887 &[],
888 cx,
889 )
890 .unwrap();
891 project
892 .update_diagnostics(
893 server_id,
894 lsp::PublishDiagnosticsParams {
895 uri: Url::from_file_path("/root/other.rs").unwrap(),
896 version: None,
897 diagnostics: vec![lsp::Diagnostic {
898 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
899 severity: Some(lsp::DiagnosticSeverity::ERROR),
900 message: "unknown variable 'c'".to_string(),
901 ..Default::default()
902 }],
903 },
904 &[],
905 cx,
906 )
907 .unwrap();
908 });
909
910 let main_ignored_buffer = project
911 .update(cx, |project, cx| {
912 project.open_buffer((main_worktree_id, "b.rs"), cx)
913 })
914 .await
915 .unwrap();
916 main_ignored_buffer.update(cx, |buffer, _| {
917 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
918 assert_eq!(
919 chunks
920 .iter()
921 .map(|(s, d)| (s.as_str(), *d))
922 .collect::<Vec<_>>(),
923 &[
924 ("let ", None),
925 ("b", Some(DiagnosticSeverity::ERROR)),
926 (" = 2;", None),
927 ],
            "Gitignored buffers should still get in-buffer diagnostics",
929 );
930 });
931 let other_buffer = project
932 .update(cx, |project, cx| {
933 project.open_buffer((other_worktree_id, ""), cx)
934 })
935 .await
936 .unwrap();
937 other_buffer.update(cx, |buffer, _| {
938 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
939 assert_eq!(
940 chunks
941 .iter()
942 .map(|(s, d)| (s.as_str(), *d))
943 .collect::<Vec<_>>(),
944 &[
945 ("let b = ", None),
946 ("c", Some(DiagnosticSeverity::ERROR)),
947 (";", None),
948 ],
            "Buffers from hidden worktrees should still get in-buffer diagnostics"
950 );
951 });
952
953 project.update(cx, |project, cx| {
954 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
955 assert_eq!(
956 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
957 vec![(
958 ProjectPath {
959 worktree_id: main_worktree_id,
960 path: Arc::from(Path::new("b.rs")),
961 },
962 server_id,
963 DiagnosticSummary {
964 error_count: 1,
965 warning_count: 0,
966 }
967 )]
968 );
969 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
970 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
971 });
972}
973
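// Disk-based diagnostic progress tokens from the language server should be surfaced as
// DiskBasedDiagnosticsStarted/Finished events, with DiagnosticsUpdated events in
// between, and repeated empty publishes should not produce duplicate update events.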
974#[gpui::test]
975async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977
978 let progress_token = "the-progress-token";
979 let mut language = Language::new(
980 LanguageConfig {
981 name: "Rust".into(),
982 matcher: LanguageMatcher {
983 path_suffixes: vec!["rs".to_string()],
984 ..Default::default()
985 },
986 ..Default::default()
987 },
988 Some(tree_sitter_rust::language()),
989 );
990 let mut fake_servers = language
991 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
992 disk_based_diagnostics_progress_token: Some(progress_token.into()),
993 disk_based_diagnostics_sources: vec!["disk".into()],
994 ..Default::default()
995 }))
996 .await;
997
998 let fs = FakeFs::new(cx.executor());
999 fs.insert_tree(
1000 "/dir",
1001 json!({
1002 "a.rs": "fn a() { A }",
1003 "b.rs": "const y: i32 = 1",
1004 }),
1005 )
1006 .await;
1007
1008 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1009 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1010 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1011
1012 // Cause worktree to start the fake language server
1013 let _buffer = project
1014 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1015 .await
1016 .unwrap();
1017
1018 let mut events = cx.events(&project);
1019
1020 let fake_server = fake_servers.next().await.unwrap();
1021 assert_eq!(
1022 events.next().await.unwrap(),
1023 Event::LanguageServerAdded(LanguageServerId(0)),
1024 );
1025
1026 fake_server
1027 .start_progress(format!("{}/0", progress_token))
1028 .await;
1029 assert_eq!(
1030 events.next().await.unwrap(),
1031 Event::DiskBasedDiagnosticsStarted {
1032 language_server_id: LanguageServerId(0),
1033 }
1034 );
1035
1036 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1037 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1038 version: None,
1039 diagnostics: vec![lsp::Diagnostic {
1040 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1041 severity: Some(lsp::DiagnosticSeverity::ERROR),
1042 message: "undefined variable 'A'".to_string(),
1043 ..Default::default()
1044 }],
1045 });
1046 assert_eq!(
1047 events.next().await.unwrap(),
1048 Event::DiagnosticsUpdated {
1049 language_server_id: LanguageServerId(0),
1050 path: (worktree_id, Path::new("a.rs")).into()
1051 }
1052 );
1053
1054 fake_server.end_progress(format!("{}/0", progress_token));
1055 assert_eq!(
1056 events.next().await.unwrap(),
1057 Event::DiskBasedDiagnosticsFinished {
1058 language_server_id: LanguageServerId(0)
1059 }
1060 );
1061
1062 let buffer = project
1063 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1064 .await
1065 .unwrap();
1066
1067 buffer.update(cx, |buffer, _| {
1068 let snapshot = buffer.snapshot();
1069 let diagnostics = snapshot
1070 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1071 .collect::<Vec<_>>();
1072 assert_eq!(
1073 diagnostics,
1074 &[DiagnosticEntry {
1075 range: Point::new(0, 9)..Point::new(0, 10),
1076 diagnostic: Diagnostic {
1077 severity: lsp::DiagnosticSeverity::ERROR,
1078 message: "undefined variable 'A'".to_string(),
1079 group_id: 0,
1080 is_primary: true,
1081 ..Default::default()
1082 }
1083 }]
1084 )
1085 });
1086
1087 // Ensure publishing empty diagnostics twice only results in one update event.
1088 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1089 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1090 version: None,
1091 diagnostics: Default::default(),
1092 });
1093 assert_eq!(
1094 events.next().await.unwrap(),
1095 Event::DiagnosticsUpdated {
1096 language_server_id: LanguageServerId(0),
1097 path: (worktree_id, Path::new("a.rs")).into()
1098 }
1099 );
1100
1101 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1102 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1103 version: None,
1104 diagnostics: Default::default(),
1105 });
1106 cx.executor().run_until_parked();
1107 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1108}
1109
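// Restarting a language server while its disk-based diagnostics are still in progress
// should not leave the project stuck in the "diagnostics running" state.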
1110#[gpui::test]
1111async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1112 init_test(cx);
1113
1114 let progress_token = "the-progress-token";
1115 let mut language = Language::new(
1116 LanguageConfig {
1117 matcher: LanguageMatcher {
1118 path_suffixes: vec!["rs".to_string()],
1119 ..Default::default()
1120 },
1121 ..Default::default()
1122 },
1123 None,
1124 );
1125 let mut fake_servers = language
1126 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1127 disk_based_diagnostics_sources: vec!["disk".into()],
1128 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1129 ..Default::default()
1130 }))
1131 .await;
1132
1133 let fs = FakeFs::new(cx.executor());
1134 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1135
1136 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1137 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1138
1139 let buffer = project
1140 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1141 .await
1142 .unwrap();
1143
1144 // Simulate diagnostics starting to update.
1145 let fake_server = fake_servers.next().await.unwrap();
1146 fake_server.start_progress(progress_token).await;
1147
1148 // Restart the server before the diagnostics finish updating.
1149 project.update(cx, |project, cx| {
1150 project.restart_language_servers_for_buffers([buffer], cx);
1151 });
1152 let mut events = cx.events(&project);
1153
1154 // Simulate the newly started server sending more diagnostics.
1155 let fake_server = fake_servers.next().await.unwrap();
1156 assert_eq!(
1157 events.next().await.unwrap(),
1158 Event::LanguageServerAdded(LanguageServerId(1))
1159 );
1160 fake_server.start_progress(progress_token).await;
1161 assert_eq!(
1162 events.next().await.unwrap(),
1163 Event::DiskBasedDiagnosticsStarted {
1164 language_server_id: LanguageServerId(1)
1165 }
1166 );
1167 project.update(cx, |project, _| {
1168 assert_eq!(
1169 project
1170 .language_servers_running_disk_based_diagnostics()
1171 .collect::<Vec<_>>(),
1172 [LanguageServerId(1)]
1173 );
1174 });
1175
1176 // All diagnostics are considered done, despite the old server's diagnostic
1177 // task never completing.
1178 fake_server.end_progress(progress_token);
1179 assert_eq!(
1180 events.next().await.unwrap(),
1181 Event::DiskBasedDiagnosticsFinished {
1182 language_server_id: LanguageServerId(1)
1183 }
1184 );
1185 project.update(cx, |project, _| {
1186 assert_eq!(
1187 project
1188 .language_servers_running_disk_based_diagnostics()
1189 .collect::<Vec<_>>(),
1190 [LanguageServerId(0); 0]
1191 );
1192 });
1193}
1194
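// Diagnostics published by a language server should be cleared when that server is
// restarted.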
1195#[gpui::test]
1196async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1197 init_test(cx);
1198
1199 let mut language = Language::new(
1200 LanguageConfig {
1201 matcher: LanguageMatcher {
1202 path_suffixes: vec!["rs".to_string()],
1203 ..Default::default()
1204 },
1205 ..Default::default()
1206 },
1207 None,
1208 );
1209 let mut fake_servers = language
1210 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1211 ..Default::default()
1212 }))
1213 .await;
1214
1215 let fs = FakeFs::new(cx.executor());
1216 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1217
1218 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1219 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1220
1221 let buffer = project
1222 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1223 .await
1224 .unwrap();
1225
1226 // Publish diagnostics
1227 let fake_server = fake_servers.next().await.unwrap();
1228 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1229 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1230 version: None,
1231 diagnostics: vec![lsp::Diagnostic {
1232 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1233 severity: Some(lsp::DiagnosticSeverity::ERROR),
1234 message: "the message".to_string(),
1235 ..Default::default()
1236 }],
1237 });
1238
1239 cx.executor().run_until_parked();
1240 buffer.update(cx, |buffer, _| {
1241 assert_eq!(
1242 buffer
1243 .snapshot()
1244 .diagnostics_in_range::<_, usize>(0..1, false)
1245 .map(|entry| entry.diagnostic.message.clone())
1246 .collect::<Vec<_>>(),
1247 ["the message".to_string()]
1248 );
1249 });
1250 project.update(cx, |project, cx| {
1251 assert_eq!(
1252 project.diagnostic_summary(false, cx),
1253 DiagnosticSummary {
1254 error_count: 1,
1255 warning_count: 0,
1256 }
1257 );
1258 });
1259
1260 project.update(cx, |project, cx| {
1261 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1262 });
1263
1264 // The diagnostics are cleared.
1265 cx.executor().run_until_parked();
1266 buffer.update(cx, |buffer, _| {
1267 assert_eq!(
1268 buffer
1269 .snapshot()
1270 .diagnostics_in_range::<_, usize>(0..1, false)
1271 .map(|entry| entry.diagnostic.message.clone())
1272 .collect::<Vec<_>>(),
1273 Vec::<String>::new(),
1274 );
1275 });
1276 project.update(cx, |project, cx| {
1277 assert_eq!(
1278 project.diagnostic_summary(false, cx),
1279 DiagnosticSummary {
1280 error_count: 0,
1281 warning_count: 0,
1282 }
1283 );
1284 });
1285}
1286
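// A diagnostics publish that references an unknown buffer version before a restart
// should not confuse the new server: the reopened document starts again at version 0.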
1287#[gpui::test]
1288async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1289 init_test(cx);
1290
1291 let mut language = Language::new(
1292 LanguageConfig {
1293 matcher: LanguageMatcher {
1294 path_suffixes: vec!["rs".to_string()],
1295 ..Default::default()
1296 },
1297 ..Default::default()
1298 },
1299 None,
1300 );
1301 let mut fake_servers = language
1302 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1303 name: "the-lsp",
1304 ..Default::default()
1305 }))
1306 .await;
1307
1308 let fs = FakeFs::new(cx.executor());
1309 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1310
1311 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1312 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1313
1314 let buffer = project
1315 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1316 .await
1317 .unwrap();
1318
1319 // Before restarting the server, report diagnostics with an unknown buffer version.
1320 let fake_server = fake_servers.next().await.unwrap();
1321 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1322 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1323 version: Some(10000),
1324 diagnostics: Vec::new(),
1325 });
1326 cx.executor().run_until_parked();
1327
1328 project.update(cx, |project, cx| {
1329 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1330 });
1331 let mut fake_server = fake_servers.next().await.unwrap();
1332 let notification = fake_server
1333 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1334 .await
1335 .text_document;
1336 assert_eq!(notification.version, 0);
1337}
1338
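// Toggling `enable_language_server` in the settings should stop and start only the
// affected language's server.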
1339#[gpui::test]
1340async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1341 init_test(cx);
1342
1343 let mut rust = Language::new(
1344 LanguageConfig {
1345 name: Arc::from("Rust"),
1346 matcher: LanguageMatcher {
1347 path_suffixes: vec!["rs".to_string()],
1348 ..Default::default()
1349 },
1350 ..Default::default()
1351 },
1352 None,
1353 );
1354 let mut fake_rust_servers = rust
1355 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1356 name: "rust-lsp",
1357 ..Default::default()
1358 }))
1359 .await;
1360 let mut js = Language::new(
1361 LanguageConfig {
1362 name: Arc::from("JavaScript"),
1363 matcher: LanguageMatcher {
1364 path_suffixes: vec!["js".to_string()],
1365 ..Default::default()
1366 },
1367 ..Default::default()
1368 },
1369 None,
1370 );
1371 let mut fake_js_servers = js
1372 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1373 name: "js-lsp",
1374 ..Default::default()
1375 }))
1376 .await;
1377
1378 let fs = FakeFs::new(cx.executor());
1379 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1380 .await;
1381
1382 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1383 project.update(cx, |project, _| {
1384 project.languages.add(Arc::new(rust));
1385 project.languages.add(Arc::new(js));
1386 });
1387
1388 let _rs_buffer = project
1389 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1390 .await
1391 .unwrap();
1392 let _js_buffer = project
1393 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1394 .await
1395 .unwrap();
1396
1397 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1398 assert_eq!(
1399 fake_rust_server_1
1400 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1401 .await
1402 .text_document
1403 .uri
1404 .as_str(),
1405 "file:///dir/a.rs"
1406 );
1407
1408 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1409 assert_eq!(
1410 fake_js_server
1411 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1412 .await
1413 .text_document
1414 .uri
1415 .as_str(),
1416 "file:///dir/b.js"
1417 );
1418
1419 // Disable Rust language server, ensuring only that server gets stopped.
1420 cx.update(|cx| {
1421 cx.update_global(|settings: &mut SettingsStore, cx| {
1422 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1423 settings.languages.insert(
1424 Arc::from("Rust"),
1425 LanguageSettingsContent {
1426 enable_language_server: Some(false),
1427 ..Default::default()
1428 },
1429 );
1430 });
1431 })
1432 });
1433 fake_rust_server_1
1434 .receive_notification::<lsp::notification::Exit>()
1435 .await;
1436
1437 // Enable Rust and disable JavaScript language servers, ensuring that the
1438 // former gets started again and that the latter stops.
1439 cx.update(|cx| {
1440 cx.update_global(|settings: &mut SettingsStore, cx| {
1441 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1442 settings.languages.insert(
1443 Arc::from("Rust"),
1444 LanguageSettingsContent {
1445 enable_language_server: Some(true),
1446 ..Default::default()
1447 },
1448 );
1449 settings.languages.insert(
1450 Arc::from("JavaScript"),
1451 LanguageSettingsContent {
1452 enable_language_server: Some(false),
1453 ..Default::default()
1454 },
1455 );
1456 });
1457 })
1458 });
1459 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1460 assert_eq!(
1461 fake_rust_server_2
1462 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1463 .await
1464 .text_document
1465 .uri
1466 .as_str(),
1467 "file:///dir/a.rs"
1468 );
1469 fake_js_server
1470 .receive_notification::<lsp::notification::Exit>()
1471 .await;
1472}
1473
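// Diagnostics that arrive for an older buffer version should be transformed through
// the edits made since that version, including overlapping ranges and out-of-order
// publishes.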
1474#[gpui::test(iterations = 3)]
1475async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1476 init_test(cx);
1477
1478 let mut language = Language::new(
1479 LanguageConfig {
1480 name: "Rust".into(),
1481 matcher: LanguageMatcher {
1482 path_suffixes: vec!["rs".to_string()],
1483 ..Default::default()
1484 },
1485 ..Default::default()
1486 },
1487 Some(tree_sitter_rust::language()),
1488 );
1489 let mut fake_servers = language
1490 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1491 disk_based_diagnostics_sources: vec!["disk".into()],
1492 ..Default::default()
1493 }))
1494 .await;
1495
1496 let text = "
1497 fn a() { A }
1498 fn b() { BB }
1499 fn c() { CCC }
1500 "
1501 .unindent();
1502
1503 let fs = FakeFs::new(cx.executor());
1504 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1505
1506 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1507 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1508
1509 let buffer = project
1510 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1511 .await
1512 .unwrap();
1513
1514 let mut fake_server = fake_servers.next().await.unwrap();
1515 let open_notification = fake_server
1516 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1517 .await;
1518
1519 // Edit the buffer, moving the content down
1520 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1521 let change_notification_1 = fake_server
1522 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1523 .await;
1524 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1525
1526 // Report some diagnostics for the initial version of the buffer
1527 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1528 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1529 version: Some(open_notification.text_document.version),
1530 diagnostics: vec![
1531 lsp::Diagnostic {
1532 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1533 severity: Some(DiagnosticSeverity::ERROR),
1534 message: "undefined variable 'A'".to_string(),
1535 source: Some("disk".to_string()),
1536 ..Default::default()
1537 },
1538 lsp::Diagnostic {
1539 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1540 severity: Some(DiagnosticSeverity::ERROR),
1541 message: "undefined variable 'BB'".to_string(),
1542 source: Some("disk".to_string()),
1543 ..Default::default()
1544 },
1545 lsp::Diagnostic {
1546 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1547 severity: Some(DiagnosticSeverity::ERROR),
1548 source: Some("disk".to_string()),
1549 message: "undefined variable 'CCC'".to_string(),
1550 ..Default::default()
1551 },
1552 ],
1553 });
1554
1555 // The diagnostics have moved down since they were created.
1556 cx.executor().run_until_parked();
1557 buffer.update(cx, |buffer, _| {
1558 assert_eq!(
1559 buffer
1560 .snapshot()
1561 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1562 .collect::<Vec<_>>(),
1563 &[
1564 DiagnosticEntry {
1565 range: Point::new(3, 9)..Point::new(3, 11),
1566 diagnostic: Diagnostic {
1567 source: Some("disk".into()),
1568 severity: DiagnosticSeverity::ERROR,
1569 message: "undefined variable 'BB'".to_string(),
1570 is_disk_based: true,
1571 group_id: 1,
1572 is_primary: true,
1573 ..Default::default()
1574 },
1575 },
1576 DiagnosticEntry {
1577 range: Point::new(4, 9)..Point::new(4, 12),
1578 diagnostic: Diagnostic {
1579 source: Some("disk".into()),
1580 severity: DiagnosticSeverity::ERROR,
1581 message: "undefined variable 'CCC'".to_string(),
1582 is_disk_based: true,
1583 group_id: 2,
1584 is_primary: true,
1585 ..Default::default()
1586 }
1587 }
1588 ]
1589 );
1590 assert_eq!(
1591 chunks_with_diagnostics(buffer, 0..buffer.len()),
1592 [
1593 ("\n\nfn a() { ".to_string(), None),
1594 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1595 (" }\nfn b() { ".to_string(), None),
1596 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1597 (" }\nfn c() { ".to_string(), None),
1598 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1599 (" }\n".to_string(), None),
1600 ]
1601 );
1602 assert_eq!(
1603 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1604 [
1605 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1606 (" }\nfn c() { ".to_string(), None),
1607 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1608 ]
1609 );
1610 });
1611
1612 // Ensure overlapping diagnostics are highlighted correctly.
1613 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1614 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1615 version: Some(open_notification.text_document.version),
1616 diagnostics: vec![
1617 lsp::Diagnostic {
1618 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1619 severity: Some(DiagnosticSeverity::ERROR),
1620 message: "undefined variable 'A'".to_string(),
1621 source: Some("disk".to_string()),
1622 ..Default::default()
1623 },
1624 lsp::Diagnostic {
1625 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1626 severity: Some(DiagnosticSeverity::WARNING),
1627 message: "unreachable statement".to_string(),
1628 source: Some("disk".to_string()),
1629 ..Default::default()
1630 },
1631 ],
1632 });
1633
1634 cx.executor().run_until_parked();
1635 buffer.update(cx, |buffer, _| {
1636 assert_eq!(
1637 buffer
1638 .snapshot()
1639 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1640 .collect::<Vec<_>>(),
1641 &[
1642 DiagnosticEntry {
1643 range: Point::new(2, 9)..Point::new(2, 12),
1644 diagnostic: Diagnostic {
1645 source: Some("disk".into()),
1646 severity: DiagnosticSeverity::WARNING,
1647 message: "unreachable statement".to_string(),
1648 is_disk_based: true,
1649 group_id: 4,
1650 is_primary: true,
1651 ..Default::default()
1652 }
1653 },
1654 DiagnosticEntry {
1655 range: Point::new(2, 9)..Point::new(2, 10),
1656 diagnostic: Diagnostic {
1657 source: Some("disk".into()),
1658 severity: DiagnosticSeverity::ERROR,
1659 message: "undefined variable 'A'".to_string(),
1660 is_disk_based: true,
1661 group_id: 3,
1662 is_primary: true,
1663 ..Default::default()
1664 },
1665 }
1666 ]
1667 );
1668 assert_eq!(
1669 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1670 [
1671 ("fn a() { ".to_string(), None),
1672 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1673 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1674 ("\n".to_string(), None),
1675 ]
1676 );
1677 assert_eq!(
1678 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1679 [
1680 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1681 ("\n".to_string(), None),
1682 ]
1683 );
1684 });
1685
1686 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1687 // changes since the last save.
1688 buffer.update(cx, |buffer, cx| {
1689 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1690 buffer.edit(
1691 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1692 None,
1693 cx,
1694 );
1695 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1696 });
1697 let change_notification_2 = fake_server
1698 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1699 .await;
1700 assert!(
1701 change_notification_2.text_document.version > change_notification_1.text_document.version
1702 );
1703
1704 // Handle out-of-order diagnostics
1705 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1706 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1707 version: Some(change_notification_2.text_document.version),
1708 diagnostics: vec![
1709 lsp::Diagnostic {
1710 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1711 severity: Some(DiagnosticSeverity::ERROR),
1712 message: "undefined variable 'BB'".to_string(),
1713 source: Some("disk".to_string()),
1714 ..Default::default()
1715 },
1716 lsp::Diagnostic {
1717 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1718 severity: Some(DiagnosticSeverity::WARNING),
1719 message: "undefined variable 'A'".to_string(),
1720 source: Some("disk".to_string()),
1721 ..Default::default()
1722 },
1723 ],
1724 });
1725
1726 cx.executor().run_until_parked();
1727 buffer.update(cx, |buffer, _| {
1728 assert_eq!(
1729 buffer
1730 .snapshot()
1731 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1732 .collect::<Vec<_>>(),
1733 &[
1734 DiagnosticEntry {
1735 range: Point::new(2, 21)..Point::new(2, 22),
1736 diagnostic: Diagnostic {
1737 source: Some("disk".into()),
1738 severity: DiagnosticSeverity::WARNING,
1739 message: "undefined variable 'A'".to_string(),
1740 is_disk_based: true,
1741 group_id: 6,
1742 is_primary: true,
1743 ..Default::default()
1744 }
1745 },
1746 DiagnosticEntry {
1747 range: Point::new(3, 9)..Point::new(3, 14),
1748 diagnostic: Diagnostic {
1749 source: Some("disk".into()),
1750 severity: DiagnosticSeverity::ERROR,
1751 message: "undefined variable 'BB'".to_string(),
1752 is_disk_based: true,
1753 group_id: 5,
1754 is_primary: true,
1755 ..Default::default()
1756 },
1757 }
1758 ]
1759 );
1760 });
1761}
1762
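// Empty diagnostic ranges should be widened to cover at least one character so they
// remain visible: forward within a line, backward at the end of a line.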
1763#[gpui::test]
1764async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1765 init_test(cx);
1766
1767 let text = concat!(
1768 "let one = ;\n", //
1769 "let two = \n",
1770 "let three = 3;\n",
1771 );
1772
1773 let fs = FakeFs::new(cx.executor());
1774 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1775
1776 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1777 let buffer = project
1778 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1779 .await
1780 .unwrap();
1781
1782 project.update(cx, |project, cx| {
1783 project
1784 .update_buffer_diagnostics(
1785 &buffer,
1786 LanguageServerId(0),
1787 None,
1788 vec![
1789 DiagnosticEntry {
1790 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1791 diagnostic: Diagnostic {
1792 severity: DiagnosticSeverity::ERROR,
1793 message: "syntax error 1".to_string(),
1794 ..Default::default()
1795 },
1796 },
1797 DiagnosticEntry {
1798 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1799 diagnostic: Diagnostic {
1800 severity: DiagnosticSeverity::ERROR,
1801 message: "syntax error 2".to_string(),
1802 ..Default::default()
1803 },
1804 },
1805 ],
1806 cx,
1807 )
1808 .unwrap();
1809 });
1810
1811 // An empty range is extended forward to include the following character.
1812 // At the end of a line, an empty range is extended backward to include
1813 // the preceding character.
1814 buffer.update(cx, |buffer, _| {
1815 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1816 assert_eq!(
1817 chunks
1818 .iter()
1819 .map(|(s, d)| (s.as_str(), *d))
1820 .collect::<Vec<_>>(),
1821 &[
1822 ("let one = ", None),
1823 (";", Some(DiagnosticSeverity::ERROR)),
1824 ("\nlet two =", None),
1825 (" ", Some(DiagnosticSeverity::ERROR)),
1826 ("\nlet three = 3;\n", None)
1827 ]
1828 );
1829 });
1830}
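// Diagnostics from different language servers for the same path should be counted
// independently in the project's diagnostic summary.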
1831
1832#[gpui::test]
1833async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1834 init_test(cx);
1835
1836 let fs = FakeFs::new(cx.executor());
1837 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1838 .await;
1839
1840 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1841
1842 project.update(cx, |project, cx| {
1843 project
1844 .update_diagnostic_entries(
1845 LanguageServerId(0),
1846 Path::new("/dir/a.rs").to_owned(),
1847 None,
1848 vec![DiagnosticEntry {
1849 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1850 diagnostic: Diagnostic {
1851 severity: DiagnosticSeverity::ERROR,
1852 is_primary: true,
1853 message: "syntax error a1".to_string(),
1854 ..Default::default()
1855 },
1856 }],
1857 cx,
1858 )
1859 .unwrap();
1860 project
1861 .update_diagnostic_entries(
1862 LanguageServerId(1),
1863 Path::new("/dir/a.rs").to_owned(),
1864 None,
1865 vec![DiagnosticEntry {
1866 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1867 diagnostic: Diagnostic {
1868 severity: DiagnosticSeverity::ERROR,
1869 is_primary: true,
1870 message: "syntax error b1".to_string(),
1871 ..Default::default()
1872 },
1873 }],
1874 cx,
1875 )
1876 .unwrap();
1877
1878 assert_eq!(
1879 project.diagnostic_summary(false, cx),
1880 DiagnosticSummary {
1881 error_count: 2,
1882 warning_count: 0,
1883 }
1884 );
1885 });
1886}
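// LSP edits that reference an older document version should be interpreted against
// that version's coordinates and then mapped through the user's subsequent edits.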
1887
1888#[gpui::test]
1889async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1890 init_test(cx);
1891
1892 let mut language = Language::new(
1893 LanguageConfig {
1894 name: "Rust".into(),
1895 matcher: LanguageMatcher {
1896 path_suffixes: vec!["rs".to_string()],
1897 ..Default::default()
1898 },
1899 ..Default::default()
1900 },
1901 Some(tree_sitter_rust::language()),
1902 );
1903 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1904
1905 let text = "
1906 fn a() {
1907 f1();
1908 }
1909 fn b() {
1910 f2();
1911 }
1912 fn c() {
1913 f3();
1914 }
1915 "
1916 .unindent();
1917
1918 let fs = FakeFs::new(cx.executor());
1919 fs.insert_tree(
1920 "/dir",
1921 json!({
1922 "a.rs": text.clone(),
1923 }),
1924 )
1925 .await;
1926
1927 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1928 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1929 let buffer = project
1930 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1931 .await
1932 .unwrap();
1933
1934 let mut fake_server = fake_servers.next().await.unwrap();
1935 let lsp_document_version = fake_server
1936 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1937 .await
1938 .text_document
1939 .version;
1940
1941 // Simulate editing the buffer after the language server computes some edits.
1942 buffer.update(cx, |buffer, cx| {
1943 buffer.edit(
1944 [(
1945 Point::new(0, 0)..Point::new(0, 0),
1946 "// above first function\n",
1947 )],
1948 None,
1949 cx,
1950 );
1951 buffer.edit(
1952 [(
1953 Point::new(2, 0)..Point::new(2, 0),
1954 " // inside first function\n",
1955 )],
1956 None,
1957 cx,
1958 );
1959 buffer.edit(
1960 [(
1961 Point::new(6, 4)..Point::new(6, 4),
1962 "// inside second function ",
1963 )],
1964 None,
1965 cx,
1966 );
1967
1968 assert_eq!(
1969 buffer.text(),
1970 "
1971 // above first function
1972 fn a() {
1973 // inside first function
1974 f1();
1975 }
1976 fn b() {
1977 // inside second function f2();
1978 }
1979 fn c() {
1980 f3();
1981 }
1982 "
1983 .unindent()
1984 );
1985 });
1986
1987 let edits = project
1988 .update(cx, |project, cx| {
1989 project.edits_from_lsp(
1990 &buffer,
1991 vec![
1992 // replace body of first function
1993 lsp::TextEdit {
1994 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1995 new_text: "
1996 fn a() {
1997 f10();
1998 }
1999 "
2000 .unindent(),
2001 },
2002 // edit inside second function
2003 lsp::TextEdit {
2004 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2005 new_text: "00".into(),
2006 },
2007 // edit inside third function via two distinct edits
2008 lsp::TextEdit {
2009 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2010 new_text: "4000".into(),
2011 },
2012 lsp::TextEdit {
2013 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2014 new_text: "".into(),
2015 },
2016 ],
2017 LanguageServerId(0),
2018 Some(lsp_document_version),
2019 cx,
2020 )
2021 })
2022 .await
2023 .unwrap();
2024
2025 buffer.update(cx, |buffer, cx| {
2026 for (range, new_text) in edits {
2027 buffer.edit([(range, new_text)], None, cx);
2028 }
2029 assert_eq!(
2030 buffer.text(),
2031 "
2032 // above first function
2033 fn a() {
2034 // inside first function
2035 f10();
2036 }
2037 fn b() {
2038 // inside second function f200();
2039 }
2040 fn c() {
2041 f4000();
2042 }
2043 "
2044 .unindent()
2045 );
2046 });
2047}
2048
2049#[gpui::test]
2050async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2051 init_test(cx);
2052
2053 let text = "
2054 use a::b;
2055 use a::c;
2056
2057 fn f() {
2058 b();
2059 c();
2060 }
2061 "
2062 .unindent();
2063
2064 let fs = FakeFs::new(cx.executor());
2065 fs.insert_tree(
2066 "/dir",
2067 json!({
2068 "a.rs": text.clone(),
2069 }),
2070 )
2071 .await;
2072
2073 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2074 let buffer = project
2075 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2076 .await
2077 .unwrap();
2078
2079 // Simulate the language server sending us a small edit in the form of a very large diff.
2080 // Rust-analyzer does this when performing a merge-imports code action.
2081 let edits = project
2082 .update(cx, |project, cx| {
2083 project.edits_from_lsp(
2084 &buffer,
2085 [
2086 // Replace the first use statement without editing the semicolon.
2087 lsp::TextEdit {
2088 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2089 new_text: "a::{b, c}".into(),
2090 },
2091 // Reinsert the remainder of the file between the semicolon and the final
2092 // newline of the file.
2093 lsp::TextEdit {
2094 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2095 new_text: "\n\n".into(),
2096 },
2097 lsp::TextEdit {
2098 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2099 new_text: "
2100 fn f() {
2101 b();
2102 c();
2103 }"
2104 .unindent(),
2105 },
2106 // Delete everything after the first newline of the file.
2107 lsp::TextEdit {
2108 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2109 new_text: "".into(),
2110 },
2111 ],
2112 LanguageServerId(0),
2113 None,
2114 cx,
2115 )
2116 })
2117 .await
2118 .unwrap();
2119
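    // The returned edits are collapsed down to the two ranges that actually change the
    // buffer: the rewritten `use` statement and the deleted `use a::c;` line.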
2120 buffer.update(cx, |buffer, cx| {
2121 let edits = edits
2122 .into_iter()
2123 .map(|(range, text)| {
2124 (
2125 range.start.to_point(buffer)..range.end.to_point(buffer),
2126 text,
2127 )
2128 })
2129 .collect::<Vec<_>>();
2130
2131 assert_eq!(
2132 edits,
2133 [
2134 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2135 (Point::new(1, 0)..Point::new(2, 0), "".into())
2136 ]
2137 );
2138
2139 for (range, new_text) in edits {
2140 buffer.edit([(range, new_text)], None, cx);
2141 }
2142 assert_eq!(
2143 buffer.text(),
2144 "
2145 use a::{b, c};
2146
2147 fn f() {
2148 b();
2149 c();
2150 }
2151 "
2152 .unindent()
2153 );
2154 });
2155}
2156
2157#[gpui::test]
2158async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2159 init_test(cx);
2160
2161 let text = "
2162 use a::b;
2163 use a::c;
2164
2165 fn f() {
2166 b();
2167 c();
2168 }
2169 "
2170 .unindent();
2171
2172 let fs = FakeFs::new(cx.executor());
2173 fs.insert_tree(
2174 "/dir",
2175 json!({
2176 "a.rs": text.clone(),
2177 }),
2178 )
2179 .await;
2180
2181 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2182 let buffer = project
2183 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2184 .await
2185 .unwrap();
2186
    // Simulate the language server sending us edits out of order, with ranges that are
    // sometimes inverted or that point to invalid locations.
2189 let edits = project
2190 .update(cx, |project, cx| {
2191 project.edits_from_lsp(
2192 &buffer,
2193 [
2194 lsp::TextEdit {
2195 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2196 new_text: "\n\n".into(),
2197 },
2198 lsp::TextEdit {
2199 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2200 new_text: "a::{b, c}".into(),
2201 },
2202 lsp::TextEdit {
2203 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2204 new_text: "".into(),
2205 },
2206 lsp::TextEdit {
2207 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2208 new_text: "
2209 fn f() {
2210 b();
2211 c();
2212 }"
2213 .unindent(),
2214 },
2215 ],
2216 LanguageServerId(0),
2217 None,
2218 cx,
2219 )
2220 })
2221 .await
2222 .unwrap();
2223
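    // Even with inverted and out-of-range inputs, the resulting edits are the same as in
    // the previous test.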
2224 buffer.update(cx, |buffer, cx| {
2225 let edits = edits
2226 .into_iter()
2227 .map(|(range, text)| {
2228 (
2229 range.start.to_point(buffer)..range.end.to_point(buffer),
2230 text,
2231 )
2232 })
2233 .collect::<Vec<_>>();
2234
2235 assert_eq!(
2236 edits,
2237 [
2238 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2239 (Point::new(1, 0)..Point::new(2, 0), "".into())
2240 ]
2241 );
2242
2243 for (range, new_text) in edits {
2244 buffer.edit([(range, new_text)], None, cx);
2245 }
2246 assert_eq!(
2247 buffer.text(),
2248 "
2249 use a::{b, c};
2250
2251 fn f() {
2252 b();
2253 c();
2254 }
2255 "
2256 .unindent()
2257 );
2258 });
2259}
2260
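// Collects the chunks of `buffer` within `range`, merging adjacent chunks that share the
// same diagnostic severity.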
2261fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2262 buffer: &Buffer,
2263 range: Range<T>,
2264) -> Vec<(String, Option<DiagnosticSeverity>)> {
2265 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2266 for chunk in buffer.snapshot().chunks(range, true) {
2267 if chunks.last().map_or(false, |prev_chunk| {
2268 prev_chunk.1 == chunk.diagnostic_severity
2269 }) {
2270 chunks.last_mut().unwrap().0.push_str(chunk.text);
2271 } else {
2272 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2273 }
2274 }
2275 chunks
2276}
2277
2278#[gpui::test(iterations = 10)]
2279async fn test_definition(cx: &mut gpui::TestAppContext) {
2280 init_test(cx);
2281
2282 let mut language = Language::new(
2283 LanguageConfig {
2284 name: "Rust".into(),
2285 matcher: LanguageMatcher {
2286 path_suffixes: vec!["rs".to_string()],
2287 ..Default::default()
2288 },
2289 ..Default::default()
2290 },
2291 Some(tree_sitter_rust::language()),
2292 );
2293 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2294
2295 let fs = FakeFs::new(cx.executor());
2296 fs.insert_tree(
2297 "/dir",
2298 json!({
2299 "a.rs": "const fn a() { A }",
2300 "b.rs": "const y: i32 = crate::a()",
2301 }),
2302 )
2303 .await;
2304
2305 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2306 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2307
2308 let buffer = project
2309 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2310 .await
2311 .unwrap();
2312
2313 let fake_server = fake_servers.next().await.unwrap();
2314 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2315 let params = params.text_document_position_params;
2316 assert_eq!(
2317 params.text_document.uri.to_file_path().unwrap(),
2318 Path::new("/dir/b.rs"),
2319 );
2320 assert_eq!(params.position, lsp::Position::new(0, 22));
2321
2322 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2323 lsp::Location::new(
2324 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2325 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2326 ),
2327 )))
2328 });
2329
2330 let mut definitions = project
2331 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2332 .await
2333 .unwrap();
2334
    // Assert that no new language server was started.
2336 cx.executor().run_until_parked();
2337 assert!(fake_servers.try_next().is_err());
2338
2339 assert_eq!(definitions.len(), 1);
2340 let definition = definitions.pop().unwrap();
2341 cx.update(|cx| {
2342 let target_buffer = definition.target.buffer.read(cx);
2343 assert_eq!(
2344 target_buffer
2345 .file()
2346 .unwrap()
2347 .as_local()
2348 .unwrap()
2349 .abs_path(cx),
2350 Path::new("/dir/a.rs"),
2351 );
2352 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
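        // Navigating to a definition outside the project adds the target file as a
        // hidden (non-visible) worktree, which is released once the definition is dropped.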
2353 assert_eq!(
2354 list_worktrees(&project, cx),
2355 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2356 );
2357
2358 drop(definition);
2359 });
2360 cx.update(|cx| {
2361 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2362 });
2363
2364 fn list_worktrees<'a>(
2365 project: &'a Model<Project>,
2366 cx: &'a AppContext,
2367 ) -> Vec<(&'a Path, bool)> {
2368 project
2369 .read(cx)
2370 .worktrees()
2371 .map(|worktree| {
2372 let worktree = worktree.read(cx);
2373 (
2374 worktree.as_local().unwrap().abs_path().as_ref(),
2375 worktree.is_visible(),
2376 )
2377 })
2378 .collect::<Vec<_>>()
2379 }
2380}
2381
2382#[gpui::test]
2383async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2384 init_test(cx);
2385
2386 let mut language = Language::new(
2387 LanguageConfig {
2388 name: "TypeScript".into(),
2389 matcher: LanguageMatcher {
2390 path_suffixes: vec!["ts".to_string()],
2391 ..Default::default()
2392 },
2393 ..Default::default()
2394 },
2395 Some(tree_sitter_typescript::language_typescript()),
2396 );
2397 let mut fake_language_servers = language
2398 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2399 capabilities: lsp::ServerCapabilities {
2400 completion_provider: Some(lsp::CompletionOptions {
2401 trigger_characters: Some(vec![":".to_string()]),
2402 ..Default::default()
2403 }),
2404 ..Default::default()
2405 },
2406 ..Default::default()
2407 }))
2408 .await;
2409
2410 let fs = FakeFs::new(cx.executor());
2411 fs.insert_tree(
2412 "/dir",
2413 json!({
2414 "a.ts": "",
2415 }),
2416 )
2417 .await;
2418
2419 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2420 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2421 let buffer = project
2422 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2423 .await
2424 .unwrap();
2425
2426 let fake_server = fake_language_servers.next().await.unwrap();
2427
2428 let text = "let a = b.fqn";
2429 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2430 let completions = project.update(cx, |project, cx| {
2431 project.completions(&buffer, text.len(), cx)
2432 });
2433
2434 fake_server
2435 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2436 Ok(Some(lsp::CompletionResponse::Array(vec![
2437 lsp::CompletionItem {
2438 label: "fullyQualifiedName?".into(),
2439 insert_text: Some("fullyQualifiedName".into()),
2440 ..Default::default()
2441 },
2442 ])))
2443 })
2444 .next()
2445 .await;
2446 let completions = completions.await.unwrap();
2447 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
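    // Since the server did not provide an edit range, the replaced range is inferred from
    // the word ("fqn") preceding the cursor.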
2448 assert_eq!(completions.len(), 1);
2449 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2450 assert_eq!(
2451 completions[0].old_range.to_offset(&snapshot),
2452 text.len() - 3..text.len()
2453 );
2454
2455 let text = "let a = \"atoms/cmp\"";
2456 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2457 let completions = project.update(cx, |project, cx| {
2458 project.completions(&buffer, text.len() - 1, cx)
2459 });
2460
2461 fake_server
2462 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2463 Ok(Some(lsp::CompletionResponse::Array(vec![
2464 lsp::CompletionItem {
2465 label: "component".into(),
2466 ..Default::default()
2467 },
2468 ])))
2469 })
2470 .next()
2471 .await;
2472 let completions = completions.await.unwrap();
2473 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
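    // Again, no edit range was provided; the replaced range covers the partial token
    // ("cmp") immediately before the cursor inside the string literal.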
2474 assert_eq!(completions.len(), 1);
2475 assert_eq!(completions[0].new_text, "component");
2476 assert_eq!(
2477 completions[0].old_range.to_offset(&snapshot),
2478 text.len() - 4..text.len() - 1
2479 );
2480}
2481
2482#[gpui::test]
2483async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2484 init_test(cx);
2485
2486 let mut language = Language::new(
2487 LanguageConfig {
2488 name: "TypeScript".into(),
2489 matcher: LanguageMatcher {
2490 path_suffixes: vec!["ts".to_string()],
2491 ..Default::default()
2492 },
2493 ..Default::default()
2494 },
2495 Some(tree_sitter_typescript::language_typescript()),
2496 );
2497 let mut fake_language_servers = language
2498 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2499 capabilities: lsp::ServerCapabilities {
2500 completion_provider: Some(lsp::CompletionOptions {
2501 trigger_characters: Some(vec![":".to_string()]),
2502 ..Default::default()
2503 }),
2504 ..Default::default()
2505 },
2506 ..Default::default()
2507 }))
2508 .await;
2509
2510 let fs = FakeFs::new(cx.executor());
2511 fs.insert_tree(
2512 "/dir",
2513 json!({
2514 "a.ts": "",
2515 }),
2516 )
2517 .await;
2518
2519 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2520 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2521 let buffer = project
2522 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2523 .await
2524 .unwrap();
2525
2526 let fake_server = fake_language_servers.next().await.unwrap();
2527
2528 let text = "let a = b.fqn";
2529 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2530 let completions = project.update(cx, |project, cx| {
2531 project.completions(&buffer, text.len(), cx)
2532 });
2533
2534 fake_server
2535 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2536 Ok(Some(lsp::CompletionResponse::Array(vec![
2537 lsp::CompletionItem {
2538 label: "fullyQualifiedName?".into(),
2539 insert_text: Some("fully\rQualified\r\nName".into()),
2540 ..Default::default()
2541 },
2542 ])))
2543 })
2544 .next()
2545 .await;
2546 let completions = completions.await.unwrap();
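    // Carriage returns in the server's `insert_text` are normalized to `\n`.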
2547 assert_eq!(completions.len(), 1);
2548 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2549}
2550
2551#[gpui::test(iterations = 10)]
2552async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2553 init_test(cx);
2554
2555 let mut language = Language::new(
2556 LanguageConfig {
2557 name: "TypeScript".into(),
2558 matcher: LanguageMatcher {
2559 path_suffixes: vec!["ts".to_string()],
2560 ..Default::default()
2561 },
2562 ..Default::default()
2563 },
2564 None,
2565 );
2566 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2567
2568 let fs = FakeFs::new(cx.executor());
2569 fs.insert_tree(
2570 "/dir",
2571 json!({
2572 "a.ts": "a",
2573 }),
2574 )
2575 .await;
2576
2577 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2578 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2579 let buffer = project
2580 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2581 .await
2582 .unwrap();
2583
2584 let fake_server = fake_language_servers.next().await.unwrap();
2585
    // The language server returns code actions that contain commands, not edits.
2587 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2588 fake_server
2589 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2590 Ok(Some(vec![
2591 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2592 title: "The code action".into(),
2593 command: Some(lsp::Command {
2594 title: "The command".into(),
2595 command: "_the/command".into(),
2596 arguments: Some(vec![json!("the-argument")]),
2597 }),
2598 ..Default::default()
2599 }),
2600 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2601 title: "two".into(),
2602 ..Default::default()
2603 }),
2604 ]))
2605 })
2606 .next()
2607 .await;
2608
2609 let action = actions.await.unwrap()[0].clone();
2610 let apply = project.update(cx, |project, cx| {
2611 project.apply_code_action(buffer.clone(), action, true, cx)
2612 });
2613
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2616 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2617 |action, _| async move { Ok(action) },
2618 );
2619
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2622 fake_server
2623 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2624 let fake = fake_server.clone();
2625 move |params, _| {
2626 assert_eq!(params.command, "_the/command");
2627 let fake = fake.clone();
2628 async move {
2629 fake.server
2630 .request::<lsp::request::ApplyWorkspaceEdit>(
2631 lsp::ApplyWorkspaceEditParams {
2632 label: None,
2633 edit: lsp::WorkspaceEdit {
2634 changes: Some(
2635 [(
2636 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2637 vec![lsp::TextEdit {
2638 range: lsp::Range::new(
2639 lsp::Position::new(0, 0),
2640 lsp::Position::new(0, 0),
2641 ),
2642 new_text: "X".into(),
2643 }],
2644 )]
2645 .into_iter()
2646 .collect(),
2647 ),
2648 ..Default::default()
2649 },
2650 },
2651 )
2652 .await
2653 .unwrap();
2654 Ok(Some(json!(null)))
2655 }
2656 }
2657 })
2658 .next()
2659 .await;
2660
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2663 let transaction = apply.await.unwrap();
2664 assert!(transaction.0.contains_key(&buffer));
2665 buffer.update(cx, |buffer, cx| {
2666 assert_eq!(buffer.text(), "Xa");
2667 buffer.undo(cx);
2668 assert_eq!(buffer.text(), "a");
2669 });
2670}
2671
2672#[gpui::test(iterations = 10)]
2673async fn test_save_file(cx: &mut gpui::TestAppContext) {
2674 init_test(cx);
2675
2676 let fs = FakeFs::new(cx.executor());
2677 fs.insert_tree(
2678 "/dir",
2679 json!({
2680 "file1": "the old contents",
2681 }),
2682 )
2683 .await;
2684
2685 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2686 let buffer = project
2687 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2688 .await
2689 .unwrap();
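    // Insert a large amount of text so that the buffer is dirty before saving.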
2690 buffer.update(cx, |buffer, cx| {
2691 assert_eq!(buffer.text(), "the old contents");
2692 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2693 });
2694
2695 project
2696 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2697 .await
2698 .unwrap();
2699
2700 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2701 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2702}
2703
2704#[gpui::test(iterations = 30)]
2705async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2706 init_test(cx);
2707
2708 let fs = FakeFs::new(cx.executor().clone());
2709 fs.insert_tree(
2710 "/dir",
2711 json!({
2712 "file1": "the original contents",
2713 }),
2714 )
2715 .await;
2716
2717 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2718 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2719 let buffer = project
2720 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2721 .await
2722 .unwrap();
2723
2724 // Simulate buffer diffs being slow, so that they don't complete before
2725 // the next file change occurs.
2726 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2727
2728 // Change the buffer's file on disk, and then wait for the file change
2729 // to be detected by the worktree, so that the buffer starts reloading.
2730 fs.save(
2731 "/dir/file1".as_ref(),
2732 &"the first contents".into(),
2733 Default::default(),
2734 )
2735 .await
2736 .unwrap();
2737 worktree.next_event(cx);
2738
2739 // Change the buffer's file again. Depending on the random seed, the
2740 // previous file change may still be in progress.
2741 fs.save(
2742 "/dir/file1".as_ref(),
2743 &"the second contents".into(),
2744 Default::default(),
2745 )
2746 .await
2747 .unwrap();
2748 worktree.next_event(cx);
2749
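    // Let both reloads settle; regardless of which one finishes last, the buffer should
    // end up matching the file's current contents on disk.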
2750 cx.executor().run_until_parked();
2751 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2752 buffer.read_with(cx, |buffer, _| {
2753 assert_eq!(buffer.text(), on_disk_text);
2754 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2756 });
2757}
2758
2759#[gpui::test(iterations = 30)]
2760async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2761 init_test(cx);
2762
2763 let fs = FakeFs::new(cx.executor().clone());
2764 fs.insert_tree(
2765 "/dir",
2766 json!({
2767 "file1": "the original contents",
2768 }),
2769 )
2770 .await;
2771
2772 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2773 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2774 let buffer = project
2775 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2776 .await
2777 .unwrap();
2778
2779 // Simulate buffer diffs being slow, so that they don't complete before
2780 // the next file change occurs.
2781 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2782
2783 // Change the buffer's file on disk, and then wait for the file change
2784 // to be detected by the worktree, so that the buffer starts reloading.
2785 fs.save(
2786 "/dir/file1".as_ref(),
2787 &"the first contents".into(),
2788 Default::default(),
2789 )
2790 .await
2791 .unwrap();
2792 worktree.next_event(cx);
2793
2794 cx.executor()
2795 .spawn(cx.executor().simulate_random_delay())
2796 .await;
2797
    // Perform a no-op edit, causing the buffer's version to increase.
2799 buffer.update(cx, |buffer, cx| {
2800 buffer.edit([(0..0, " ")], None, cx);
2801 buffer.undo(cx);
2802 });
2803
2804 cx.executor().run_until_parked();
2805 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2806 buffer.read_with(cx, |buffer, _| {
2807 let buffer_text = buffer.text();
2808 if buffer_text == on_disk_text {
2809 assert!(
2810 !buffer.is_dirty() && !buffer.has_conflict(),
2811 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2812 );
2813 }
2814 // If the file change occurred while the buffer was processing the first
2815 // change, the buffer will be in a conflicting state.
2816 else {
2817 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2819 }
2820 });
2821}
2822
2823#[gpui::test]
2824async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2825 init_test(cx);
2826
2827 let fs = FakeFs::new(cx.executor());
2828 fs.insert_tree(
2829 "/dir",
2830 json!({
2831 "file1": "the old contents",
2832 }),
2833 )
2834 .await;
2835
2836 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2837 let buffer = project
2838 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2839 .await
2840 .unwrap();
2841 buffer.update(cx, |buffer, cx| {
2842 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2843 });
2844
2845 project
2846 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2847 .await
2848 .unwrap();
2849
2850 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2851 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2852}
2853
2854#[gpui::test]
2855async fn test_save_as(cx: &mut gpui::TestAppContext) {
2856 init_test(cx);
2857
2858 let fs = FakeFs::new(cx.executor());
2859 fs.insert_tree("/dir", json!({})).await;
2860
2861 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2862
2863 let languages = project.update(cx, |project, _| project.languages().clone());
2864 languages.register_native_grammars([("rust", tree_sitter_rust::language())]);
2865 languages.register_test_language(LanguageConfig {
2866 name: "Rust".into(),
2867 grammar: Some("rust".into()),
2868 matcher: LanguageMatcher {
2869 path_suffixes: vec!["rs".into()],
2870 ..Default::default()
2871 },
2872 ..Default::default()
2873 });
2874
2875 let buffer = project.update(cx, |project, cx| {
2876 project.create_buffer("", None, cx).unwrap()
2877 });
2878 buffer.update(cx, |buffer, cx| {
2879 buffer.edit([(0..0, "abc")], None, cx);
2880 assert!(buffer.is_dirty());
2881 assert!(!buffer.has_conflict());
2882 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2883 });
2884 project
2885 .update(cx, |project, cx| {
2886 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2887 })
2888 .await
2889 .unwrap();
2890 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2891
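    // Once the save completes, the buffer is associated with its new path and its
    // language is re-detected as Rust from the `.rs` extension.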
2892 cx.executor().run_until_parked();
2893 buffer.update(cx, |buffer, cx| {
2894 assert_eq!(
2895 buffer.file().unwrap().full_path(cx),
2896 Path::new("dir/file1.rs")
2897 );
2898 assert!(!buffer.is_dirty());
2899 assert!(!buffer.has_conflict());
2900 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2901 });
2902
2903 let opened_buffer = project
2904 .update(cx, |project, cx| {
2905 project.open_local_buffer("/dir/file1.rs", cx)
2906 })
2907 .await
2908 .unwrap();
2909 assert_eq!(opened_buffer, buffer);
2910}
2911
2912#[gpui::test(retries = 5)]
2913async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2914 init_test(cx);
2915 cx.executor().allow_parking();
2916
2917 let dir = temp_tree(json!({
2918 "a": {
2919 "file1": "",
2920 "file2": "",
2921 "file3": "",
2922 },
2923 "b": {
2924 "c": {
2925 "file4": "",
2926 "file5": "",
2927 }
2928 }
2929 }));
2930
2931 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2932 let rpc = project.update(cx, |p, _| p.client.clone());
2933
2934 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2935 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2936 async move { buffer.await.unwrap() }
2937 };
2938 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2939 project.update(cx, |project, cx| {
2940 let tree = project.worktrees().next().unwrap();
2941 tree.read(cx)
2942 .entry_for_path(path)
2943 .unwrap_or_else(|| panic!("no entry for path {}", path))
2944 .id
2945 })
2946 };
2947
2948 let buffer2 = buffer_for_path("a/file2", cx).await;
2949 let buffer3 = buffer_for_path("a/file3", cx).await;
2950 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2951 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2952
2953 let file2_id = id_for_path("a/file2", cx);
2954 let file3_id = id_for_path("a/file3", cx);
2955 let file4_id = id_for_path("b/c/file4", cx);
2956
2957 // Create a remote copy of this worktree.
2958 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2959
2960 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2961
2962 let updates = Arc::new(Mutex::new(Vec::new()));
2963 tree.update(cx, |tree, cx| {
2964 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2965 let updates = updates.clone();
2966 move |update| {
2967 updates.lock().push(update);
2968 async { true }
2969 }
2970 });
2971 });
2972
2973 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2974
2975 cx.executor().run_until_parked();
2976
2977 cx.update(|cx| {
2978 assert!(!buffer2.read(cx).is_dirty());
2979 assert!(!buffer3.read(cx).is_dirty());
2980 assert!(!buffer4.read(cx).is_dirty());
2981 assert!(!buffer5.read(cx).is_dirty());
2982 });
2983
2984 // Rename and delete files and directories.
2985 tree.flush_fs_events(cx).await;
2986 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2987 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2988 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2989 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2990 tree.flush_fs_events(cx).await;
2991
2992 let expected_paths = vec![
2993 "a",
2994 "a/file1",
2995 "a/file2.new",
2996 "b",
2997 "d",
2998 "d/file3",
2999 "d/file4",
3000 ];
3001
3002 cx.update(|app| {
3003 assert_eq!(
3004 tree.read(app)
3005 .paths()
3006 .map(|p| p.to_str().unwrap())
3007 .collect::<Vec<_>>(),
3008 expected_paths
3009 );
3010 });
3011
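    // Entry ids are preserved across renames and moves.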
3012 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
3013 assert_eq!(id_for_path("d/file3", cx), file3_id);
3014 assert_eq!(id_for_path("d/file4", cx), file4_id);
3015
3016 cx.update(|cx| {
3017 assert_eq!(
3018 buffer2.read(cx).file().unwrap().path().as_ref(),
3019 Path::new("a/file2.new")
3020 );
3021 assert_eq!(
3022 buffer3.read(cx).file().unwrap().path().as_ref(),
3023 Path::new("d/file3")
3024 );
3025 assert_eq!(
3026 buffer4.read(cx).file().unwrap().path().as_ref(),
3027 Path::new("d/file4")
3028 );
3029 assert_eq!(
3030 buffer5.read(cx).file().unwrap().path().as_ref(),
3031 Path::new("b/c/file5")
3032 );
3033
3034 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
3035 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
3036 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
3037 assert!(buffer5.read(cx).file().unwrap().is_deleted());
3038 });
3039
3040 // Update the remote worktree. Check that it becomes consistent with the
3041 // local worktree.
3042 cx.executor().run_until_parked();
3043
3044 remote.update(cx, |remote, _| {
3045 for update in updates.lock().drain(..) {
3046 remote.as_remote_mut().unwrap().update_from_remote(update);
3047 }
3048 });
3049 cx.executor().run_until_parked();
3050 remote.update(cx, |remote, _| {
3051 assert_eq!(
3052 remote
3053 .paths()
3054 .map(|p| p.to_str().unwrap())
3055 .collect::<Vec<_>>(),
3056 expected_paths
3057 );
3058 });
3059}
3060
3061#[gpui::test(iterations = 10)]
3062async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3063 init_test(cx);
3064
3065 let fs = FakeFs::new(cx.executor());
3066 fs.insert_tree(
3067 "/dir",
3068 json!({
3069 "a": {
3070 "file1": "",
3071 }
3072 }),
3073 )
3074 .await;
3075
3076 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3077 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3078 let tree_id = tree.update(cx, |tree, _| tree.id());
3079
3080 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3081 project.update(cx, |project, cx| {
3082 let tree = project.worktrees().next().unwrap();
3083 tree.read(cx)
3084 .entry_for_path(path)
3085 .unwrap_or_else(|| panic!("no entry for path {}", path))
3086 .id
3087 })
3088 };
3089
3090 let dir_id = id_for_path("a", cx);
3091 let file_id = id_for_path("a/file1", cx);
3092 let buffer = project
3093 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3094 .await
3095 .unwrap();
3096 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3097
3098 project
3099 .update(cx, |project, cx| {
3100 project.rename_entry(dir_id, Path::new("b"), cx)
3101 })
3102 .unwrap()
3103 .await
3104 .unwrap();
3105 cx.executor().run_until_parked();
3106
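    // Renaming the directory preserves the entry ids of the directory and its file, and
    // the open buffer stays clean.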
3107 assert_eq!(id_for_path("b", cx), dir_id);
3108 assert_eq!(id_for_path("b/file1", cx), file_id);
3109 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3110}
3111
3112#[gpui::test]
3113async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3114 init_test(cx);
3115
3116 let fs = FakeFs::new(cx.executor());
3117 fs.insert_tree(
3118 "/dir",
3119 json!({
3120 "a.txt": "a-contents",
3121 "b.txt": "b-contents",
3122 }),
3123 )
3124 .await;
3125
3126 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3127
3128 // Spawn multiple tasks to open paths, repeating some paths.
3129 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3130 (
3131 p.open_local_buffer("/dir/a.txt", cx),
3132 p.open_local_buffer("/dir/b.txt", cx),
3133 p.open_local_buffer("/dir/a.txt", cx),
3134 )
3135 });
3136
3137 let buffer_a_1 = buffer_a_1.await.unwrap();
3138 let buffer_a_2 = buffer_a_2.await.unwrap();
3139 let buffer_b = buffer_b.await.unwrap();
3140 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3141 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3142
3143 // There is only one buffer per path.
3144 let buffer_a_id = buffer_a_1.entity_id();
3145 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3146
3147 // Open the same path again while it is still open.
3148 drop(buffer_a_1);
3149 let buffer_a_3 = project
3150 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3151 .await
3152 .unwrap();
3153
3154 // There's still only one buffer per path.
3155 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3156}
3157
3158#[gpui::test]
3159async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3160 init_test(cx);
3161
3162 let fs = FakeFs::new(cx.executor());
3163 fs.insert_tree(
3164 "/dir",
3165 json!({
3166 "file1": "abc",
3167 "file2": "def",
3168 "file3": "ghi",
3169 }),
3170 )
3171 .await;
3172
3173 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3174
3175 let buffer1 = project
3176 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3177 .await
3178 .unwrap();
3179 let events = Arc::new(Mutex::new(Vec::new()));
3180
3181 // initially, the buffer isn't dirty.
3182 buffer1.update(cx, |buffer, cx| {
3183 cx.subscribe(&buffer1, {
3184 let events = events.clone();
3185 move |_, _, event, _| match event {
3186 BufferEvent::Operation(_) => {}
3187 _ => events.lock().push(event.clone()),
3188 }
3189 })
3190 .detach();
3191
3192 assert!(!buffer.is_dirty());
3193 assert!(events.lock().is_empty());
3194
3195 buffer.edit([(1..2, "")], None, cx);
3196 });
3197
3198 // after the first edit, the buffer is dirty, and emits a dirtied event.
3199 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
3201 assert!(buffer.is_dirty());
3202 assert_eq!(
3203 *events.lock(),
3204 &[language::Event::Edited, language::Event::DirtyChanged]
3205 );
3206 events.lock().clear();
3207 buffer.did_save(
3208 buffer.version(),
3209 buffer.as_rope().fingerprint(),
3210 buffer.file().unwrap().mtime(),
3211 cx,
3212 );
3213 });
3214
3215 // after saving, the buffer is not dirty, and emits a saved event.
3216 buffer1.update(cx, |buffer, cx| {
3217 assert!(!buffer.is_dirty());
3218 assert_eq!(*events.lock(), &[language::Event::Saved]);
3219 events.lock().clear();
3220
3221 buffer.edit([(1..1, "B")], None, cx);
3222 buffer.edit([(2..2, "D")], None, cx);
3223 });
3224
3225 // after editing again, the buffer is dirty, and emits another dirty event.
3226 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
3228 assert!(buffer.is_dirty());
3229 assert_eq!(
3230 *events.lock(),
3231 &[
3232 language::Event::Edited,
3233 language::Event::DirtyChanged,
3234 language::Event::Edited,
3235 ],
3236 );
3237 events.lock().clear();
3238
3239 // After restoring the buffer to its previously-saved state,
3240 // the buffer is not considered dirty anymore.
3241 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
3243 assert!(!buffer.is_dirty());
3244 });
3245
3246 assert_eq!(
3247 *events.lock(),
3248 &[language::Event::Edited, language::Event::DirtyChanged]
3249 );
3250
3251 // When a file is deleted, the buffer is considered dirty.
3252 let events = Arc::new(Mutex::new(Vec::new()));
3253 let buffer2 = project
3254 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3255 .await
3256 .unwrap();
3257 buffer2.update(cx, |_, cx| {
3258 cx.subscribe(&buffer2, {
3259 let events = events.clone();
3260 move |_, _, event, _| events.lock().push(event.clone())
3261 })
3262 .detach();
3263 });
3264
3265 fs.remove_file("/dir/file2".as_ref(), Default::default())
3266 .await
3267 .unwrap();
3268 cx.executor().run_until_parked();
3269 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3270 assert_eq!(
3271 *events.lock(),
3272 &[
3273 language::Event::DirtyChanged,
3274 language::Event::FileHandleChanged
3275 ]
3276 );
3277
    // When a file is already dirty when deleted, we don't emit another DirtyChanged event.
3279 let events = Arc::new(Mutex::new(Vec::new()));
3280 let buffer3 = project
3281 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3282 .await
3283 .unwrap();
3284 buffer3.update(cx, |_, cx| {
3285 cx.subscribe(&buffer3, {
3286 let events = events.clone();
3287 move |_, _, event, _| events.lock().push(event.clone())
3288 })
3289 .detach();
3290 });
3291
3292 buffer3.update(cx, |buffer, cx| {
3293 buffer.edit([(0..0, "x")], None, cx);
3294 });
3295 events.lock().clear();
3296 fs.remove_file("/dir/file3".as_ref(), Default::default())
3297 .await
3298 .unwrap();
3299 cx.executor().run_until_parked();
3300 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3301 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3302}
3303
3304#[gpui::test]
3305async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3306 init_test(cx);
3307
3308 let initial_contents = "aaa\nbbbbb\nc\n";
3309 let fs = FakeFs::new(cx.executor());
3310 fs.insert_tree(
3311 "/dir",
3312 json!({
3313 "the-file": initial_contents,
3314 }),
3315 )
3316 .await;
3317 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3318 let buffer = project
3319 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3320 .await
3321 .unwrap();
3322
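    // Record anchors at column 1 of the first three rows so we can verify how they are
    // relocated when the file is reloaded from disk.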
3323 let anchors = (0..3)
3324 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3325 .collect::<Vec<_>>();
3326
3327 // Change the file on disk, adding two new lines of text, and removing
3328 // one line.
3329 buffer.update(cx, |buffer, _| {
3330 assert!(!buffer.is_dirty());
3331 assert!(!buffer.has_conflict());
3332 });
3333 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3334 fs.save(
3335 "/dir/the-file".as_ref(),
3336 &new_contents.into(),
3337 LineEnding::Unix,
3338 )
3339 .await
3340 .unwrap();
3341
3342 // Because the buffer was not modified, it is reloaded from disk. Its
3343 // contents are edited according to the diff between the old and new
3344 // file contents.
3345 cx.executor().run_until_parked();
3346 buffer.update(cx, |buffer, _| {
3347 assert_eq!(buffer.text(), new_contents);
3348 assert!(!buffer.is_dirty());
3349 assert!(!buffer.has_conflict());
3350
3351 let anchor_positions = anchors
3352 .iter()
3353 .map(|anchor| anchor.to_point(&*buffer))
3354 .collect::<Vec<_>>();
3355 assert_eq!(
3356 anchor_positions,
3357 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3358 );
3359 });
3360
3361 // Modify the buffer
3362 buffer.update(cx, |buffer, cx| {
3363 buffer.edit([(0..0, " ")], None, cx);
3364 assert!(buffer.is_dirty());
3365 assert!(!buffer.has_conflict());
3366 });
3367
3368 // Change the file on disk again, adding blank lines to the beginning.
3369 fs.save(
3370 "/dir/the-file".as_ref(),
3371 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3372 LineEnding::Unix,
3373 )
3374 .await
3375 .unwrap();
3376
3377 // Because the buffer is modified, it doesn't reload from disk, but is
3378 // marked as having a conflict.
3379 cx.executor().run_until_parked();
3380 buffer.update(cx, |buffer, _| {
3381 assert!(buffer.has_conflict());
3382 });
3383}
3384
3385#[gpui::test]
3386async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3387 init_test(cx);
3388
3389 let fs = FakeFs::new(cx.executor());
3390 fs.insert_tree(
3391 "/dir",
3392 json!({
3393 "file1": "a\nb\nc\n",
3394 "file2": "one\r\ntwo\r\nthree\r\n",
3395 }),
3396 )
3397 .await;
3398
3399 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3400 let buffer1 = project
3401 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3402 .await
3403 .unwrap();
3404 let buffer2 = project
3405 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3406 .await
3407 .unwrap();
3408
3409 buffer1.update(cx, |buffer, _| {
3410 assert_eq!(buffer.text(), "a\nb\nc\n");
3411 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3412 });
3413 buffer2.update(cx, |buffer, _| {
3414 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3415 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3416 });
3417
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3420 fs.save(
3421 "/dir/file1".as_ref(),
3422 &"aaa\nb\nc\n".into(),
3423 LineEnding::Windows,
3424 )
3425 .await
3426 .unwrap();
3427 cx.executor().run_until_parked();
3428 buffer1.update(cx, |buffer, _| {
3429 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3430 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3431 });
3432
3433 // Save a file with windows line endings. The file is written correctly.
3434 buffer2.update(cx, |buffer, cx| {
3435 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3436 });
3437 project
3438 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3439 .await
3440 .unwrap();
3441 assert_eq!(
3442 fs.load("/dir/file2".as_ref()).await.unwrap(),
3443 "one\r\ntwo\r\nthree\r\nfour\r\n",
3444 );
3445}
3446
3447#[gpui::test]
3448async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3449 init_test(cx);
3450
3451 let fs = FakeFs::new(cx.executor());
3452 fs.insert_tree(
3453 "/the-dir",
3454 json!({
3455 "a.rs": "
3456 fn foo(mut v: Vec<usize>) {
3457 for x in &v {
3458 v.push(1);
3459 }
3460 }
3461 "
3462 .unindent(),
3463 }),
3464 )
3465 .await;
3466
3467 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3468 let buffer = project
3469 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3470 .await
3471 .unwrap();
3472
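    // Publish diagnostics whose hints and primary diagnostics reference one another via
    // `relatedInformation`, so they can be grouped together.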
3473 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3474 let message = lsp::PublishDiagnosticsParams {
3475 uri: buffer_uri.clone(),
3476 diagnostics: vec![
3477 lsp::Diagnostic {
3478 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3479 severity: Some(DiagnosticSeverity::WARNING),
3480 message: "error 1".to_string(),
3481 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3482 location: lsp::Location {
3483 uri: buffer_uri.clone(),
3484 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3485 },
3486 message: "error 1 hint 1".to_string(),
3487 }]),
3488 ..Default::default()
3489 },
3490 lsp::Diagnostic {
3491 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3492 severity: Some(DiagnosticSeverity::HINT),
3493 message: "error 1 hint 1".to_string(),
3494 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3495 location: lsp::Location {
3496 uri: buffer_uri.clone(),
3497 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3498 },
3499 message: "original diagnostic".to_string(),
3500 }]),
3501 ..Default::default()
3502 },
3503 lsp::Diagnostic {
3504 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3505 severity: Some(DiagnosticSeverity::ERROR),
3506 message: "error 2".to_string(),
3507 related_information: Some(vec![
3508 lsp::DiagnosticRelatedInformation {
3509 location: lsp::Location {
3510 uri: buffer_uri.clone(),
3511 range: lsp::Range::new(
3512 lsp::Position::new(1, 13),
3513 lsp::Position::new(1, 15),
3514 ),
3515 },
3516 message: "error 2 hint 1".to_string(),
3517 },
3518 lsp::DiagnosticRelatedInformation {
3519 location: lsp::Location {
3520 uri: buffer_uri.clone(),
3521 range: lsp::Range::new(
3522 lsp::Position::new(1, 13),
3523 lsp::Position::new(1, 15),
3524 ),
3525 },
3526 message: "error 2 hint 2".to_string(),
3527 },
3528 ]),
3529 ..Default::default()
3530 },
3531 lsp::Diagnostic {
3532 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3533 severity: Some(DiagnosticSeverity::HINT),
3534 message: "error 2 hint 1".to_string(),
3535 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3536 location: lsp::Location {
3537 uri: buffer_uri.clone(),
3538 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3539 },
3540 message: "original diagnostic".to_string(),
3541 }]),
3542 ..Default::default()
3543 },
3544 lsp::Diagnostic {
3545 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3546 severity: Some(DiagnosticSeverity::HINT),
3547 message: "error 2 hint 2".to_string(),
3548 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3549 location: lsp::Location {
3550 uri: buffer_uri,
3551 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3552 },
3553 message: "original diagnostic".to_string(),
3554 }]),
3555 ..Default::default()
3556 },
3557 ],
3558 version: None,
3559 };
3560
3561 project
3562 .update(cx, |p, cx| {
3563 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3564 })
3565 .unwrap();
3566 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3567
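    // Diagnostics that reference one another are assigned to the same group, with exactly
    // one primary diagnostic per group.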
3568 assert_eq!(
3569 buffer
3570 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3571 .collect::<Vec<_>>(),
3572 &[
3573 DiagnosticEntry {
3574 range: Point::new(1, 8)..Point::new(1, 9),
3575 diagnostic: Diagnostic {
3576 severity: DiagnosticSeverity::WARNING,
3577 message: "error 1".to_string(),
3578 group_id: 1,
3579 is_primary: true,
3580 ..Default::default()
3581 }
3582 },
3583 DiagnosticEntry {
3584 range: Point::new(1, 8)..Point::new(1, 9),
3585 diagnostic: Diagnostic {
3586 severity: DiagnosticSeverity::HINT,
3587 message: "error 1 hint 1".to_string(),
3588 group_id: 1,
3589 is_primary: false,
3590 ..Default::default()
3591 }
3592 },
3593 DiagnosticEntry {
3594 range: Point::new(1, 13)..Point::new(1, 15),
3595 diagnostic: Diagnostic {
3596 severity: DiagnosticSeverity::HINT,
3597 message: "error 2 hint 1".to_string(),
3598 group_id: 0,
3599 is_primary: false,
3600 ..Default::default()
3601 }
3602 },
3603 DiagnosticEntry {
3604 range: Point::new(1, 13)..Point::new(1, 15),
3605 diagnostic: Diagnostic {
3606 severity: DiagnosticSeverity::HINT,
3607 message: "error 2 hint 2".to_string(),
3608 group_id: 0,
3609 is_primary: false,
3610 ..Default::default()
3611 }
3612 },
3613 DiagnosticEntry {
3614 range: Point::new(2, 8)..Point::new(2, 17),
3615 diagnostic: Diagnostic {
3616 severity: DiagnosticSeverity::ERROR,
3617 message: "error 2".to_string(),
3618 group_id: 0,
3619 is_primary: true,
3620 ..Default::default()
3621 }
3622 }
3623 ]
3624 );
3625
3626 assert_eq!(
3627 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3628 &[
3629 DiagnosticEntry {
3630 range: Point::new(1, 13)..Point::new(1, 15),
3631 diagnostic: Diagnostic {
3632 severity: DiagnosticSeverity::HINT,
3633 message: "error 2 hint 1".to_string(),
3634 group_id: 0,
3635 is_primary: false,
3636 ..Default::default()
3637 }
3638 },
3639 DiagnosticEntry {
3640 range: Point::new(1, 13)..Point::new(1, 15),
3641 diagnostic: Diagnostic {
3642 severity: DiagnosticSeverity::HINT,
3643 message: "error 2 hint 2".to_string(),
3644 group_id: 0,
3645 is_primary: false,
3646 ..Default::default()
3647 }
3648 },
3649 DiagnosticEntry {
3650 range: Point::new(2, 8)..Point::new(2, 17),
3651 diagnostic: Diagnostic {
3652 severity: DiagnosticSeverity::ERROR,
3653 message: "error 2".to_string(),
3654 group_id: 0,
3655 is_primary: true,
3656 ..Default::default()
3657 }
3658 }
3659 ]
3660 );
3661
3662 assert_eq!(
3663 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3664 &[
3665 DiagnosticEntry {
3666 range: Point::new(1, 8)..Point::new(1, 9),
3667 diagnostic: Diagnostic {
3668 severity: DiagnosticSeverity::WARNING,
3669 message: "error 1".to_string(),
3670 group_id: 1,
3671 is_primary: true,
3672 ..Default::default()
3673 }
3674 },
3675 DiagnosticEntry {
3676 range: Point::new(1, 8)..Point::new(1, 9),
3677 diagnostic: Diagnostic {
3678 severity: DiagnosticSeverity::HINT,
3679 message: "error 1 hint 1".to_string(),
3680 group_id: 1,
3681 is_primary: false,
3682 ..Default::default()
3683 }
3684 },
3685 ]
3686 );
3687}
3688
3689#[gpui::test]
3690async fn test_rename(cx: &mut gpui::TestAppContext) {
3691 init_test(cx);
3692
3693 let mut language = Language::new(
3694 LanguageConfig {
3695 name: "Rust".into(),
3696 matcher: LanguageMatcher {
3697 path_suffixes: vec!["rs".to_string()],
3698 ..Default::default()
3699 },
3700 ..Default::default()
3701 },
3702 Some(tree_sitter_rust::language()),
3703 );
3704 let mut fake_servers = language
3705 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3706 capabilities: lsp::ServerCapabilities {
3707 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3708 prepare_provider: Some(true),
3709 work_done_progress_options: Default::default(),
3710 })),
3711 ..Default::default()
3712 },
3713 ..Default::default()
3714 }))
3715 .await;
3716
3717 let fs = FakeFs::new(cx.executor());
3718 fs.insert_tree(
3719 "/dir",
3720 json!({
3721 "one.rs": "const ONE: usize = 1;",
3722 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3723 }),
3724 )
3725 .await;
3726
3727 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3728 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3729 let buffer = project
3730 .update(cx, |project, cx| {
3731 project.open_local_buffer("/dir/one.rs", cx)
3732 })
3733 .await
3734 .unwrap();
3735
3736 let fake_server = fake_servers.next().await.unwrap();
3737
3738 let response = project.update(cx, |project, cx| {
3739 project.prepare_rename(buffer.clone(), 7, cx)
3740 });
3741 fake_server
3742 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3743 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3744 assert_eq!(params.position, lsp::Position::new(0, 7));
3745 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3746 lsp::Position::new(0, 6),
3747 lsp::Position::new(0, 9),
3748 ))))
3749 })
3750 .next()
3751 .await
3752 .unwrap();
3753 let range = response.await.unwrap().unwrap();
3754 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3755 assert_eq!(range, 6..9);
3756
3757 let response = project.update(cx, |project, cx| {
3758 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3759 });
3760 fake_server
3761 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3762 assert_eq!(
3763 params.text_document_position.text_document.uri.as_str(),
3764 "file:///dir/one.rs"
3765 );
3766 assert_eq!(
3767 params.text_document_position.position,
3768 lsp::Position::new(0, 7)
3769 );
3770 assert_eq!(params.new_name, "THREE");
3771 Ok(Some(lsp::WorkspaceEdit {
3772 changes: Some(
3773 [
3774 (
3775 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3776 vec![lsp::TextEdit::new(
3777 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3778 "THREE".to_string(),
3779 )],
3780 ),
3781 (
3782 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3783 vec![
3784 lsp::TextEdit::new(
3785 lsp::Range::new(
3786 lsp::Position::new(0, 24),
3787 lsp::Position::new(0, 27),
3788 ),
3789 "THREE".to_string(),
3790 ),
3791 lsp::TextEdit::new(
3792 lsp::Range::new(
3793 lsp::Position::new(0, 35),
3794 lsp::Position::new(0, 38),
3795 ),
3796 "THREE".to_string(),
3797 ),
3798 ],
3799 ),
3800 ]
3801 .into_iter()
3802 .collect(),
3803 ),
3804 ..Default::default()
3805 }))
3806 })
3807 .next()
3808 .await
3809 .unwrap();
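    // The rename yields a project transaction with an entry for each of the two buffers
    // that were edited.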
3810 let mut transaction = response.await.unwrap().0;
3811 assert_eq!(transaction.len(), 2);
3812 assert_eq!(
3813 transaction
3814 .remove_entry(&buffer)
3815 .unwrap()
3816 .0
3817 .update(cx, |buffer, _| buffer.text()),
3818 "const THREE: usize = 1;"
3819 );
3820 assert_eq!(
3821 transaction
3822 .into_keys()
3823 .next()
3824 .unwrap()
3825 .update(cx, |buffer, _| buffer.text()),
3826 "const TWO: usize = one::THREE + one::THREE;"
3827 );
3828}
3829
3830#[gpui::test]
3831async fn test_search(cx: &mut gpui::TestAppContext) {
3832 init_test(cx);
3833
3834 let fs = FakeFs::new(cx.executor());
3835 fs.insert_tree(
3836 "/dir",
3837 json!({
3838 "one.rs": "const ONE: usize = 1;",
3839 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3840 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3841 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3842 }),
3843 )
3844 .await;
3845 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3846 assert_eq!(
3847 search(
3848 &project,
3849 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3850 cx
3851 )
3852 .await
3853 .unwrap(),
3854 HashMap::from_iter([
3855 ("two.rs".to_string(), vec![6..9]),
3856 ("three.rs".to_string(), vec![37..40])
3857 ])
3858 );
3859
3860 let buffer_4 = project
3861 .update(cx, |project, cx| {
3862 project.open_local_buffer("/dir/four.rs", cx)
3863 })
3864 .await
3865 .unwrap();
3866 buffer_4.update(cx, |buffer, cx| {
3867 let text = "two::TWO";
3868 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3869 });
3870
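    // The search also reflects unsaved edits in open buffers: the modified contents of
    // four.rs now match the query as well.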
3871 assert_eq!(
3872 search(
3873 &project,
3874 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3875 cx
3876 )
3877 .await
3878 .unwrap(),
3879 HashMap::from_iter([
3880 ("two.rs".to_string(), vec![6..9]),
3881 ("three.rs".to_string(), vec![37..40]),
3882 ("four.rs".to_string(), vec![25..28, 36..39])
3883 ])
3884 );
3885}
3886
3887#[gpui::test]
3888async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3889 init_test(cx);
3890
3891 let search_query = "file";
3892
3893 let fs = FakeFs::new(cx.executor());
3894 fs.insert_tree(
3895 "/dir",
3896 json!({
3897 "one.rs": r#"// Rust file one"#,
3898 "one.ts": r#"// TypeScript file one"#,
3899 "two.rs": r#"// Rust file two"#,
3900 "two.ts": r#"// TypeScript file two"#,
3901 }),
3902 )
3903 .await;
3904 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3905
3906 assert!(
3907 search(
3908 &project,
3909 SearchQuery::text(
3910 search_query,
3911 false,
3912 true,
3913 false,
3914 vec![PathMatcher::new("*.odd").unwrap()],
3915 Vec::new()
3916 )
3917 .unwrap(),
3918 cx
3919 )
3920 .await
3921 .unwrap()
3922 .is_empty(),
3923 "If no inclusions match, no files should be returned"
3924 );
3925
3926 assert_eq!(
3927 search(
3928 &project,
3929 SearchQuery::text(
3930 search_query,
3931 false,
3932 true,
3933 false,
3934 vec![PathMatcher::new("*.rs").unwrap()],
3935 Vec::new()
3936 )
3937 .unwrap(),
3938 cx
3939 )
3940 .await
3941 .unwrap(),
3942 HashMap::from_iter([
3943 ("one.rs".to_string(), vec![8..12]),
3944 ("two.rs".to_string(), vec![8..12]),
3945 ]),
3946 "Rust only search should give only Rust files"
3947 );
3948
3949 assert_eq!(
3950 search(
3951 &project,
3952 SearchQuery::text(
3953 search_query,
3954 false,
3955 true,
3956 false,
3957 vec![
3958 PathMatcher::new("*.ts").unwrap(),
3959 PathMatcher::new("*.odd").unwrap(),
3960 ],
3961 Vec::new()
3962 ).unwrap(),
3963 cx
3964 )
3965 .await
3966 .unwrap(),
3967 HashMap::from_iter([
3968 ("one.ts".to_string(), vec![14..18]),
3969 ("two.ts".to_string(), vec![14..18]),
3970 ]),
3971 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3972 );
3973
3974 assert_eq!(
3975 search(
3976 &project,
3977 SearchQuery::text(
3978 search_query,
3979 false,
3980 true,
3981 false,
3982 vec![
3983 PathMatcher::new("*.rs").unwrap(),
3984 PathMatcher::new("*.ts").unwrap(),
3985 PathMatcher::new("*.odd").unwrap(),
3986 ],
3987 Vec::new()
3988 ).unwrap(),
3989 cx
3990 )
3991 .await
3992 .unwrap(),
3993 HashMap::from_iter([
3994 ("one.rs".to_string(), vec![8..12]),
3995 ("one.ts".to_string(), vec![14..18]),
3996 ("two.rs".to_string(), vec![8..12]),
3997 ("two.ts".to_string(), vec![14..18]),
3998 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4000 );
4001}

#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("one.ts".to_string(), vec![14..18]),
            ("two.rs".to_string(), vec![8..12]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "A search excluding Rust should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("two.rs".to_string(), vec![8..12]),
        ]),
        "A search excluding TypeScript should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "A search excluding both Rust and TypeScript should give no files, even if other exclusions don't match anything"
    );
}

#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If neither inclusions nor exclusions match, no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If the same TypeScript pattern appears in both inclusions and exclusions, exclusions should win and no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Adding non-matching patterns to both lists should not change that: exclusions still win and no files are returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}

#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
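    // The searches below differ mainly in the fourth `SearchQuery::text` flag
    // (plus the path filters): with it set to `false` the gitignored entries
    // are skipped, and with it set to `true` the contents of `target/` and
    // `node_modules/` are searched as well.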
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
        "Only the one non-ignored file should contain the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("package.json".to_string(), vec![8..11]),
            ("target/index.txt".to_string(), vec![6..9]),
            (
                "node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
            ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
            ("node_modules/eslint/package.json".to_string(), vec![8..11]),
        ]),
        "An unrestricted search that includes ignored directories should find every file containing the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "A search that includes the ignored prettier directory but excludes TS files should find only one file"
    );
}

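// `glob_literal_prefix` is exercised below without being defined in this file.
// A minimal sketch of the behavior the assertions imply (hypothetical helper,
// not the real implementation): keep joining path components until one
// contains a glob metacharacter.
#[allow(dead_code)]
fn glob_literal_prefix_sketch(glob: &str) -> &str {
    let mut literal_end = 0;
    for (i, component) in glob.split('/').enumerate() {
        if component.contains(|c| matches!(c, '*' | '?' | '{' | '}')) {
            break;
        }
        if i > 0 {
            literal_end += 1; // account for the '/' separator
        }
        literal_end += component.len();
    }
    &glob[..literal_end]
}
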
#[test]
fn test_glob_literal_prefix() {
    assert_eq!(glob_literal_prefix("**/*.js"), "");
    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
}

#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}

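// Drains a project-wide search into a map keyed by the matching buffer's path,
// converting each match to a plain offset range so the tests above can assert
// on byte ranges directly.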
async fn search(
    project: &Model<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut result = HashMap::default();
    while let Some((buffer, ranges)) = search_rx.next().await {
        result.entry(buffer).or_insert(ranges);
    }
    Ok(result
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, _| {
                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}

fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init("0.0.0", cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}