use crate::{Event, *};
use fs::FakeFs;
use futures::{future, StreamExt};
use gpui::AppContext;
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{os, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
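// Verifies that, with parking allowed, a test can await a message produced by a
// separate OS thread doing blocking work (a filesystem call and a sleep).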
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
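// Verifies that blocking work offloaded with `smol::unblock` can be awaited from a
// task spawned on the foreground executor.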
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
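// Opens a project through a symlinked root directory and checks that a symlinked
// subdirectory resolves to the same inodes as the directory it points to.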
48#[gpui::test]
49async fn test_symlinks(cx: &mut gpui::TestAppContext) {
50 init_test(cx);
51 cx.executor().allow_parking();
52
53 let dir = temp_tree(json!({
54 "root": {
55 "apple": "",
56 "banana": {
57 "carrot": {
58 "date": "",
59 "endive": "",
60 }
61 },
62 "fennel": {
63 "grape": "",
64 }
65 }
66 }));
67
68 let root_link_path = dir.path().join("root_link");
69 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
70 os::unix::fs::symlink(
71 &dir.path().join("root/fennel"),
72 &dir.path().join("root/finnochio"),
73 )
74 .unwrap();
75
76 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
77
78 project.update(cx, |project, cx| {
79 let tree = project.worktrees().next().unwrap().read(cx);
80 assert_eq!(tree.file_count(), 5);
81 assert_eq!(
82 tree.inode_for_path("fennel/grape"),
83 tree.inode_for_path("finnochio/grape")
84 );
85 });
86}
87
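// Nested `.zed/settings.json` files should override settings for the files beneath
// them: `a/a.rs` picks up the root tab size of 8, while `b/b.rs` uses 2.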
88#[gpui::test]
89async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
90 init_test(cx);
91
92 let fs = FakeFs::new(cx.executor());
93 fs.insert_tree(
94 "/the-root",
95 json!({
96 ".zed": {
97 "settings.json": r#"{ "tab_size": 8 }"#
98 },
99 "a": {
100 "a.rs": "fn a() {\n A\n}"
101 },
102 "b": {
103 ".zed": {
104 "settings.json": r#"{ "tab_size": 2 }"#
105 },
106 "b.rs": "fn b() {\n B\n}"
107 }
108 }),
109 )
110 .await;
111
112 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
113 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
114
115 cx.executor().run_until_parked();
116 cx.update(|cx| {
117 let tree = worktree.read(cx);
118
119 let settings_a = language_settings(
120 None,
121 Some(
122 &(File::for_entry(
123 tree.entry_for_path("a/a.rs").unwrap().clone(),
124 worktree.clone(),
125 ) as _),
126 ),
127 cx,
128 );
129 let settings_b = language_settings(
130 None,
131 Some(
132 &(File::for_entry(
133 tree.entry_for_path("b/b.rs").unwrap().clone(),
134 worktree.clone(),
135 ) as _),
136 ),
137 cx,
138 );
139
140 assert_eq!(settings_a.tab_size.get(), 8);
141 assert_eq!(settings_b.tab_size.get(), 2);
142 });
143}
144
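// Exercises the language-server lifecycle end to end: servers start lazily per
// language, open/change/close notifications go only to the server for the buffer's
// language (saves go to every server), renames move buffers between servers, and
// servers can be restarted on demand.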
145#[gpui::test]
146async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
147 init_test(cx);
148
149 let mut rust_language = Language::new(
150 LanguageConfig {
151 name: "Rust".into(),
152 path_suffixes: vec!["rs".to_string()],
153 ..Default::default()
154 },
155 Some(tree_sitter_rust::language()),
156 );
157 let mut json_language = Language::new(
158 LanguageConfig {
159 name: "JSON".into(),
160 path_suffixes: vec!["json".to_string()],
161 ..Default::default()
162 },
163 None,
164 );
165 let mut fake_rust_servers = rust_language
166 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
167 name: "the-rust-language-server",
168 capabilities: lsp::ServerCapabilities {
169 completion_provider: Some(lsp::CompletionOptions {
170 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
171 ..Default::default()
172 }),
173 ..Default::default()
174 },
175 ..Default::default()
176 }))
177 .await;
178 let mut fake_json_servers = json_language
179 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
180 name: "the-json-language-server",
181 capabilities: lsp::ServerCapabilities {
182 completion_provider: Some(lsp::CompletionOptions {
183 trigger_characters: Some(vec![":".to_string()]),
184 ..Default::default()
185 }),
186 ..Default::default()
187 },
188 ..Default::default()
189 }))
190 .await;
191
192 let fs = FakeFs::new(cx.executor());
193 fs.insert_tree(
194 "/the-root",
195 json!({
196 "test.rs": "const A: i32 = 1;",
197 "test2.rs": "",
198 "Cargo.toml": "a = 1",
199 "package.json": "{\"a\": 1}",
200 }),
201 )
202 .await;
203
204 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
205
206 // Open a buffer without an associated language server.
207 let toml_buffer = project
208 .update(cx, |project, cx| {
209 project.open_local_buffer("/the-root/Cargo.toml", cx)
210 })
211 .await
212 .unwrap();
213
    // Open a buffer that has an associated language server before that language has been loaded.
215 let rust_buffer = project
216 .update(cx, |project, cx| {
217 project.open_local_buffer("/the-root/test.rs", cx)
218 })
219 .await
220 .unwrap();
221 rust_buffer.update(cx, |buffer, _| {
222 assert_eq!(buffer.language().map(|l| l.name()), None);
223 });
224
225 // Now we add the languages to the project, and ensure they get assigned to all
226 // the relevant open buffers.
227 project.update(cx, |project, _| {
228 project.languages.add(Arc::new(json_language));
229 project.languages.add(Arc::new(rust_language));
230 });
231 cx.executor().run_until_parked();
232 rust_buffer.update(cx, |buffer, _| {
233 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
234 });
235
236 // A server is started up, and it is notified about Rust files.
237 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
238 assert_eq!(
239 fake_rust_server
240 .receive_notification::<lsp::notification::DidOpenTextDocument>()
241 .await
242 .text_document,
243 lsp::TextDocumentItem {
244 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
245 version: 0,
246 text: "const A: i32 = 1;".to_string(),
247 language_id: Default::default()
248 }
249 );
250
251 // The buffer is configured based on the language server's capabilities.
252 rust_buffer.update(cx, |buffer, _| {
253 assert_eq!(
254 buffer.completion_triggers(),
255 &[".".to_string(), "::".to_string()]
256 );
257 });
258 toml_buffer.update(cx, |buffer, _| {
259 assert!(buffer.completion_triggers().is_empty());
260 });
261
262 // Edit a buffer. The changes are reported to the language server.
263 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
264 assert_eq!(
265 fake_rust_server
266 .receive_notification::<lsp::notification::DidChangeTextDocument>()
267 .await
268 .text_document,
269 lsp::VersionedTextDocumentIdentifier::new(
270 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
271 1
272 )
273 );
274
275 // Open a third buffer with a different associated language server.
276 let json_buffer = project
277 .update(cx, |project, cx| {
278 project.open_local_buffer("/the-root/package.json", cx)
279 })
280 .await
281 .unwrap();
282
    // A JSON language server is started and is notified only about the JSON buffer.
284 let mut fake_json_server = fake_json_servers.next().await.unwrap();
285 assert_eq!(
286 fake_json_server
287 .receive_notification::<lsp::notification::DidOpenTextDocument>()
288 .await
289 .text_document,
290 lsp::TextDocumentItem {
291 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
292 version: 0,
293 text: "{\"a\": 1}".to_string(),
294 language_id: Default::default()
295 }
296 );
297
298 // This buffer is configured based on the second language server's
299 // capabilities.
300 json_buffer.update(cx, |buffer, _| {
301 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
302 });
303
304 // When opening another buffer whose language server is already running,
305 // it is also configured based on the existing language server's capabilities.
306 let rust_buffer2 = project
307 .update(cx, |project, cx| {
308 project.open_local_buffer("/the-root/test2.rs", cx)
309 })
310 .await
311 .unwrap();
312 rust_buffer2.update(cx, |buffer, _| {
313 assert_eq!(
314 buffer.completion_triggers(),
315 &[".".to_string(), "::".to_string()]
316 );
317 });
318
319 // Changes are reported only to servers matching the buffer's language.
320 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
321 rust_buffer2.update(cx, |buffer, cx| {
322 buffer.edit([(0..0, "let x = 1;")], None, cx)
323 });
324 assert_eq!(
325 fake_rust_server
326 .receive_notification::<lsp::notification::DidChangeTextDocument>()
327 .await
328 .text_document,
329 lsp::VersionedTextDocumentIdentifier::new(
330 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
331 1
332 )
333 );
334
335 // Save notifications are reported to all servers.
336 project
337 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
338 .await
339 .unwrap();
340 assert_eq!(
341 fake_rust_server
342 .receive_notification::<lsp::notification::DidSaveTextDocument>()
343 .await
344 .text_document,
345 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
346 );
347 assert_eq!(
348 fake_json_server
349 .receive_notification::<lsp::notification::DidSaveTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
353 );
354
355 // Renames are reported only to servers matching the buffer's language.
356 fs.rename(
357 Path::new("/the-root/test2.rs"),
358 Path::new("/the-root/test3.rs"),
359 Default::default(),
360 )
361 .await
362 .unwrap();
363 assert_eq!(
364 fake_rust_server
365 .receive_notification::<lsp::notification::DidCloseTextDocument>()
366 .await
367 .text_document,
368 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
369 );
370 assert_eq!(
371 fake_rust_server
372 .receive_notification::<lsp::notification::DidOpenTextDocument>()
373 .await
374 .text_document,
375 lsp::TextDocumentItem {
376 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
377 version: 0,
378 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
379 language_id: Default::default()
380 },
381 );
382
383 rust_buffer2.update(cx, |buffer, cx| {
384 buffer.update_diagnostics(
385 LanguageServerId(0),
386 DiagnosticSet::from_sorted_entries(
387 vec![DiagnosticEntry {
388 diagnostic: Default::default(),
389 range: Anchor::MIN..Anchor::MAX,
390 }],
391 &buffer.snapshot(),
392 ),
393 cx,
394 );
395 assert_eq!(
396 buffer
397 .snapshot()
398 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
399 .count(),
400 1
401 );
402 });
403
404 // When the rename changes the extension of the file, the buffer gets closed on the old
405 // language server and gets opened on the new one.
406 fs.rename(
407 Path::new("/the-root/test3.rs"),
408 Path::new("/the-root/test3.json"),
409 Default::default(),
410 )
411 .await
412 .unwrap();
413 assert_eq!(
414 fake_rust_server
415 .receive_notification::<lsp::notification::DidCloseTextDocument>()
416 .await
417 .text_document,
418 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
419 );
420 assert_eq!(
421 fake_json_server
422 .receive_notification::<lsp::notification::DidOpenTextDocument>()
423 .await
424 .text_document,
425 lsp::TextDocumentItem {
426 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
427 version: 0,
428 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
429 language_id: Default::default()
430 },
431 );
432
433 // We clear the diagnostics, since the language has changed.
434 rust_buffer2.update(cx, |buffer, _| {
435 assert_eq!(
436 buffer
437 .snapshot()
438 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
439 .count(),
440 0
441 );
442 });
443
    // The renamed file's document version resets after switching language servers.
445 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
446 assert_eq!(
447 fake_json_server
448 .receive_notification::<lsp::notification::DidChangeTextDocument>()
449 .await
450 .text_document,
451 lsp::VersionedTextDocumentIdentifier::new(
452 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
453 1
454 )
455 );
456
457 // Restart language servers
458 project.update(cx, |project, cx| {
459 project.restart_language_servers_for_buffers(
460 vec![rust_buffer.clone(), json_buffer.clone()],
461 cx,
462 );
463 });
464
465 let mut rust_shutdown_requests = fake_rust_server
466 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
467 let mut json_shutdown_requests = fake_json_server
468 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
469 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
470
471 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
472 let mut fake_json_server = fake_json_servers.next().await.unwrap();
473
    // Ensure the Rust document is reopened in the new Rust language server.
475 assert_eq!(
476 fake_rust_server
477 .receive_notification::<lsp::notification::DidOpenTextDocument>()
478 .await
479 .text_document,
480 lsp::TextDocumentItem {
481 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
482 version: 0,
483 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
484 language_id: Default::default()
485 }
486 );
487
    // Ensure the JSON documents are reopened in the new JSON language server.
489 assert_set_eq!(
490 [
491 fake_json_server
492 .receive_notification::<lsp::notification::DidOpenTextDocument>()
493 .await
494 .text_document,
495 fake_json_server
496 .receive_notification::<lsp::notification::DidOpenTextDocument>()
497 .await
498 .text_document,
499 ],
500 [
501 lsp::TextDocumentItem {
502 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
503 version: 0,
504 text: json_buffer.update(cx, |buffer, _| buffer.text()),
505 language_id: Default::default()
506 },
507 lsp::TextDocumentItem {
508 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
509 version: 0,
510 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
511 language_id: Default::default()
512 }
513 ]
514 );
515
516 // Close notifications are reported only to servers matching the buffer's language.
517 cx.update(|_| drop(json_buffer));
518 let close_message = lsp::DidCloseTextDocumentParams {
519 text_document: lsp::TextDocumentIdentifier::new(
520 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
521 ),
522 };
523 assert_eq!(
524 fake_json_server
525 .receive_notification::<lsp::notification::DidCloseTextDocument>()
526 .await,
527 close_message,
528 );
529}
530
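// The worktree should only descend into ignored directories once a language server
// registers `workspace/didChangeWatchedFiles` globs that reach into them, and should
// then forward only the matching filesystem events to that server.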
531#[gpui::test]
532async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
533 init_test(cx);
534
535 let mut language = Language::new(
536 LanguageConfig {
537 name: "Rust".into(),
538 path_suffixes: vec!["rs".to_string()],
539 ..Default::default()
540 },
541 Some(tree_sitter_rust::language()),
542 );
543 let mut fake_servers = language
544 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
545 name: "the-language-server",
546 ..Default::default()
547 }))
548 .await;
549
550 let fs = FakeFs::new(cx.executor());
551 fs.insert_tree(
552 "/the-root",
553 json!({
554 ".gitignore": "target\n",
555 "src": {
556 "a.rs": "",
557 "b.rs": "",
558 },
559 "target": {
560 "x": {
561 "out": {
562 "x.rs": ""
563 }
564 },
565 "y": {
566 "out": {
567 "y.rs": "",
568 }
569 },
570 "z": {
571 "out": {
572 "z.rs": ""
573 }
574 }
575 }
576 }),
577 )
578 .await;
579
580 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
581 project.update(cx, |project, _| {
582 project.languages.add(Arc::new(language));
583 });
584 cx.executor().run_until_parked();
585
586 // Start the language server by opening a buffer with a compatible file extension.
587 let _buffer = project
588 .update(cx, |project, cx| {
589 project.open_local_buffer("/the-root/src/a.rs", cx)
590 })
591 .await
592 .unwrap();
593
594 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
595 project.update(cx, |project, cx| {
596 let worktree = project.worktrees().next().unwrap();
597 assert_eq!(
598 worktree
599 .read(cx)
600 .snapshot()
601 .entries(true)
602 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
603 .collect::<Vec<_>>(),
604 &[
605 (Path::new(""), false),
606 (Path::new(".gitignore"), false),
607 (Path::new("src"), false),
608 (Path::new("src/a.rs"), false),
609 (Path::new("src/b.rs"), false),
610 (Path::new("target"), true),
611 ]
612 );
613 });
614
615 let prev_read_dir_count = fs.read_dir_call_count();
616
617 // Keep track of the FS events reported to the language server.
618 let fake_server = fake_servers.next().await.unwrap();
619 let file_changes = Arc::new(Mutex::new(Vec::new()));
620 fake_server
621 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
622 registrations: vec![lsp::Registration {
623 id: Default::default(),
624 method: "workspace/didChangeWatchedFiles".to_string(),
625 register_options: serde_json::to_value(
626 lsp::DidChangeWatchedFilesRegistrationOptions {
627 watchers: vec![
628 lsp::FileSystemWatcher {
629 glob_pattern: lsp::GlobPattern::String(
630 "/the-root/Cargo.toml".to_string(),
631 ),
632 kind: None,
633 },
634 lsp::FileSystemWatcher {
635 glob_pattern: lsp::GlobPattern::String(
636 "/the-root/src/*.{rs,c}".to_string(),
637 ),
638 kind: None,
639 },
640 lsp::FileSystemWatcher {
641 glob_pattern: lsp::GlobPattern::String(
642 "/the-root/target/y/**/*.rs".to_string(),
643 ),
644 kind: None,
645 },
646 ],
647 },
648 )
649 .ok(),
650 }],
651 })
652 .await
653 .unwrap();
654 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
655 let file_changes = file_changes.clone();
656 move |params, _| {
657 let mut file_changes = file_changes.lock();
658 file_changes.extend(params.changes);
659 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
660 }
661 });
662
663 cx.executor().run_until_parked();
664 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
665 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
666
667 // Now the language server has asked us to watch an ignored directory path,
668 // so we recursively load it.
669 project.update(cx, |project, cx| {
670 let worktree = project.worktrees().next().unwrap();
671 assert_eq!(
672 worktree
673 .read(cx)
674 .snapshot()
675 .entries(true)
676 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
677 .collect::<Vec<_>>(),
678 &[
679 (Path::new(""), false),
680 (Path::new(".gitignore"), false),
681 (Path::new("src"), false),
682 (Path::new("src/a.rs"), false),
683 (Path::new("src/b.rs"), false),
684 (Path::new("target"), true),
685 (Path::new("target/x"), true),
686 (Path::new("target/y"), true),
687 (Path::new("target/y/out"), true),
688 (Path::new("target/y/out/y.rs"), true),
689 (Path::new("target/z"), true),
690 ]
691 );
692 });
693
    // Perform some file system mutations: three of them match the watched patterns
    // and two of them do not.
696 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
697 .await
698 .unwrap();
699 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
700 .await
701 .unwrap();
702 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
703 .await
704 .unwrap();
705 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
706 .await
707 .unwrap();
708 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
709 .await
710 .unwrap();
711
712 // The language server receives events for the FS mutations that match its watch patterns.
713 cx.executor().run_until_parked();
714 assert_eq!(
715 &*file_changes.lock(),
716 &[
717 lsp::FileEvent {
718 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
719 typ: lsp::FileChangeType::DELETED,
720 },
721 lsp::FileEvent {
722 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
723 typ: lsp::FileChangeType::CREATED,
724 },
725 lsp::FileEvent {
726 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
727 typ: lsp::FileChangeType::CREATED,
728 },
729 ]
730 );
731}
732
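// Diagnostics published for single-file worktrees should be routed to the correct
// buffer, one worktree per file.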
733#[gpui::test]
734async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
735 init_test(cx);
736
737 let fs = FakeFs::new(cx.executor());
738 fs.insert_tree(
739 "/dir",
740 json!({
741 "a.rs": "let a = 1;",
742 "b.rs": "let b = 2;"
743 }),
744 )
745 .await;
746
747 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
748
749 let buffer_a = project
750 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
751 .await
752 .unwrap();
753 let buffer_b = project
754 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
755 .await
756 .unwrap();
757
758 project.update(cx, |project, cx| {
759 project
760 .update_diagnostics(
761 LanguageServerId(0),
762 lsp::PublishDiagnosticsParams {
763 uri: Url::from_file_path("/dir/a.rs").unwrap(),
764 version: None,
765 diagnostics: vec![lsp::Diagnostic {
766 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
767 severity: Some(lsp::DiagnosticSeverity::ERROR),
768 message: "error 1".to_string(),
769 ..Default::default()
770 }],
771 },
772 &[],
773 cx,
774 )
775 .unwrap();
776 project
777 .update_diagnostics(
778 LanguageServerId(0),
779 lsp::PublishDiagnosticsParams {
780 uri: Url::from_file_path("/dir/b.rs").unwrap(),
781 version: None,
782 diagnostics: vec![lsp::Diagnostic {
783 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
784 severity: Some(lsp::DiagnosticSeverity::WARNING),
785 message: "error 2".to_string(),
786 ..Default::default()
787 }],
788 },
789 &[],
790 cx,
791 )
792 .unwrap();
793 });
794
795 buffer_a.update(cx, |buffer, _| {
796 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
797 assert_eq!(
798 chunks
799 .iter()
800 .map(|(s, d)| (s.as_str(), *d))
801 .collect::<Vec<_>>(),
802 &[
803 ("let ", None),
804 ("a", Some(DiagnosticSeverity::ERROR)),
805 (" = 1;", None),
806 ]
807 );
808 });
809 buffer_b.update(cx, |buffer, _| {
810 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
811 assert_eq!(
812 chunks
813 .iter()
814 .map(|(s, d)| (s.as_str(), *d))
815 .collect::<Vec<_>>(),
816 &[
817 ("let ", None),
818 ("b", Some(DiagnosticSeverity::WARNING)),
819 (" = 2;", None),
820 ]
821 );
822 });
823}
824
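// Diagnostics for a hidden (invisible) worktree should appear in its buffer but
// stay out of the project-wide diagnostic summaries.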
825#[gpui::test]
826async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
827 init_test(cx);
828
829 let fs = FakeFs::new(cx.executor());
830 fs.insert_tree(
831 "/root",
832 json!({
833 "dir": {
834 "a.rs": "let a = 1;",
835 },
836 "other.rs": "let b = c;"
837 }),
838 )
839 .await;
840
841 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
842
843 let (worktree, _) = project
844 .update(cx, |project, cx| {
845 project.find_or_create_local_worktree("/root/other.rs", false, cx)
846 })
847 .await
848 .unwrap();
849 let worktree_id = worktree.update(cx, |tree, _| tree.id());
850
851 project.update(cx, |project, cx| {
852 project
853 .update_diagnostics(
854 LanguageServerId(0),
855 lsp::PublishDiagnosticsParams {
856 uri: Url::from_file_path("/root/other.rs").unwrap(),
857 version: None,
858 diagnostics: vec![lsp::Diagnostic {
859 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
860 severity: Some(lsp::DiagnosticSeverity::ERROR),
861 message: "unknown variable 'c'".to_string(),
862 ..Default::default()
863 }],
864 },
865 &[],
866 cx,
867 )
868 .unwrap();
869 });
870
871 let buffer = project
872 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
873 .await
874 .unwrap();
875 buffer.update(cx, |buffer, _| {
876 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
877 assert_eq!(
878 chunks
879 .iter()
880 .map(|(s, d)| (s.as_str(), *d))
881 .collect::<Vec<_>>(),
882 &[
883 ("let b = ", None),
884 ("c", Some(DiagnosticSeverity::ERROR)),
885 (";", None),
886 ]
887 );
888 });
889
890 project.update(cx, |project, cx| {
891 assert_eq!(project.diagnostic_summaries(cx).next(), None);
892 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
893 });
894}
895
896#[gpui::test]
897async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
898 init_test(cx);
899
900 let progress_token = "the-progress-token";
901 let mut language = Language::new(
902 LanguageConfig {
903 name: "Rust".into(),
904 path_suffixes: vec!["rs".to_string()],
905 ..Default::default()
906 },
907 Some(tree_sitter_rust::language()),
908 );
909 let mut fake_servers = language
910 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
911 disk_based_diagnostics_progress_token: Some(progress_token.into()),
912 disk_based_diagnostics_sources: vec!["disk".into()],
913 ..Default::default()
914 }))
915 .await;
916
917 let fs = FakeFs::new(cx.executor());
918 fs.insert_tree(
919 "/dir",
920 json!({
921 "a.rs": "fn a() { A }",
922 "b.rs": "const y: i32 = 1",
923 }),
924 )
925 .await;
926
927 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
928 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
929 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
930
931 // Cause worktree to start the fake language server
932 let _buffer = project
933 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
934 .await
935 .unwrap();
936
937 let mut events = cx.events(&project);
938
939 let fake_server = fake_servers.next().await.unwrap();
940 assert_eq!(
941 events.next().await.unwrap(),
942 Event::LanguageServerAdded(LanguageServerId(0)),
943 );
944
945 fake_server
946 .start_progress(format!("{}/0", progress_token))
947 .await;
948 assert_eq!(
949 events.next().await.unwrap(),
950 Event::DiskBasedDiagnosticsStarted {
951 language_server_id: LanguageServerId(0),
952 }
953 );
954
955 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
956 uri: Url::from_file_path("/dir/a.rs").unwrap(),
957 version: None,
958 diagnostics: vec![lsp::Diagnostic {
959 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
960 severity: Some(lsp::DiagnosticSeverity::ERROR),
961 message: "undefined variable 'A'".to_string(),
962 ..Default::default()
963 }],
964 });
965 assert_eq!(
966 events.next().await.unwrap(),
967 Event::DiagnosticsUpdated {
968 language_server_id: LanguageServerId(0),
969 path: (worktree_id, Path::new("a.rs")).into()
970 }
971 );
972
973 fake_server.end_progress(format!("{}/0", progress_token));
974 assert_eq!(
975 events.next().await.unwrap(),
976 Event::DiskBasedDiagnosticsFinished {
977 language_server_id: LanguageServerId(0)
978 }
979 );
980
981 let buffer = project
982 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
983 .await
984 .unwrap();
985
986 buffer.update(cx, |buffer, _| {
987 let snapshot = buffer.snapshot();
988 let diagnostics = snapshot
989 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
990 .collect::<Vec<_>>();
991 assert_eq!(
992 diagnostics,
993 &[DiagnosticEntry {
994 range: Point::new(0, 9)..Point::new(0, 10),
995 diagnostic: Diagnostic {
996 severity: lsp::DiagnosticSeverity::ERROR,
997 message: "undefined variable 'A'".to_string(),
998 group_id: 0,
999 is_primary: true,
1000 ..Default::default()
1001 }
1002 }]
1003 )
1004 });
1005
1006 // Ensure publishing empty diagnostics twice only results in one update event.
1007 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1008 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1009 version: None,
1010 diagnostics: Default::default(),
1011 });
1012 assert_eq!(
1013 events.next().await.unwrap(),
1014 Event::DiagnosticsUpdated {
1015 language_server_id: LanguageServerId(0),
1016 path: (worktree_id, Path::new("a.rs")).into()
1017 }
1018 );
1019
1020 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1021 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1022 version: None,
1023 diagnostics: Default::default(),
1024 });
1025 cx.executor().run_until_parked();
1026 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1027}
1028
1029#[gpui::test]
1030async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1031 init_test(cx);
1032
1033 let progress_token = "the-progress-token";
1034 let mut language = Language::new(
1035 LanguageConfig {
1036 path_suffixes: vec!["rs".to_string()],
1037 ..Default::default()
1038 },
1039 None,
1040 );
1041 let mut fake_servers = language
1042 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1043 disk_based_diagnostics_sources: vec!["disk".into()],
1044 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1045 ..Default::default()
1046 }))
1047 .await;
1048
1049 let fs = FakeFs::new(cx.executor());
1050 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1051
1052 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1053 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1054
1055 let buffer = project
1056 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1057 .await
1058 .unwrap();
1059
1060 // Simulate diagnostics starting to update.
1061 let fake_server = fake_servers.next().await.unwrap();
1062 fake_server.start_progress(progress_token).await;
1063
1064 // Restart the server before the diagnostics finish updating.
1065 project.update(cx, |project, cx| {
1066 project.restart_language_servers_for_buffers([buffer], cx);
1067 });
1068 let mut events = cx.events(&project);
1069
1070 // Simulate the newly started server sending more diagnostics.
1071 let fake_server = fake_servers.next().await.unwrap();
1072 assert_eq!(
1073 events.next().await.unwrap(),
1074 Event::LanguageServerAdded(LanguageServerId(1))
1075 );
1076 fake_server.start_progress(progress_token).await;
1077 assert_eq!(
1078 events.next().await.unwrap(),
1079 Event::DiskBasedDiagnosticsStarted {
1080 language_server_id: LanguageServerId(1)
1081 }
1082 );
1083 project.update(cx, |project, _| {
1084 assert_eq!(
1085 project
1086 .language_servers_running_disk_based_diagnostics()
1087 .collect::<Vec<_>>(),
1088 [LanguageServerId(1)]
1089 );
1090 });
1091
1092 // All diagnostics are considered done, despite the old server's diagnostic
1093 // task never completing.
1094 fake_server.end_progress(progress_token);
1095 assert_eq!(
1096 events.next().await.unwrap(),
1097 Event::DiskBasedDiagnosticsFinished {
1098 language_server_id: LanguageServerId(1)
1099 }
1100 );
1101 project.update(cx, |project, _| {
1102 assert_eq!(
1103 project
1104 .language_servers_running_disk_based_diagnostics()
1105 .collect::<Vec<_>>(),
1106 [LanguageServerId(0); 0]
1107 );
1108 });
1109}
1110
1111#[gpui::test]
1112async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1113 init_test(cx);
1114
1115 let mut language = Language::new(
1116 LanguageConfig {
1117 path_suffixes: vec!["rs".to_string()],
1118 ..Default::default()
1119 },
1120 None,
1121 );
1122 let mut fake_servers = language
1123 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1124 ..Default::default()
1125 }))
1126 .await;
1127
1128 let fs = FakeFs::new(cx.executor());
1129 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1130
1131 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1132 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1133
1134 let buffer = project
1135 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1136 .await
1137 .unwrap();
1138
1139 // Publish diagnostics
1140 let fake_server = fake_servers.next().await.unwrap();
1141 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1142 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1143 version: None,
1144 diagnostics: vec![lsp::Diagnostic {
1145 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1146 severity: Some(lsp::DiagnosticSeverity::ERROR),
1147 message: "the message".to_string(),
1148 ..Default::default()
1149 }],
1150 });
1151
1152 cx.executor().run_until_parked();
1153 buffer.update(cx, |buffer, _| {
1154 assert_eq!(
1155 buffer
1156 .snapshot()
1157 .diagnostics_in_range::<_, usize>(0..1, false)
1158 .map(|entry| entry.diagnostic.message.clone())
1159 .collect::<Vec<_>>(),
1160 ["the message".to_string()]
1161 );
1162 });
1163 project.update(cx, |project, cx| {
1164 assert_eq!(
1165 project.diagnostic_summary(cx),
1166 DiagnosticSummary {
1167 error_count: 1,
1168 warning_count: 0,
1169 }
1170 );
1171 });
1172
1173 project.update(cx, |project, cx| {
1174 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1175 });
1176
1177 // The diagnostics are cleared.
1178 cx.executor().run_until_parked();
1179 buffer.update(cx, |buffer, _| {
1180 assert_eq!(
1181 buffer
1182 .snapshot()
1183 .diagnostics_in_range::<_, usize>(0..1, false)
1184 .map(|entry| entry.diagnostic.message.clone())
1185 .collect::<Vec<_>>(),
1186 Vec::<String>::new(),
1187 );
1188 });
1189 project.update(cx, |project, cx| {
1190 assert_eq!(
1191 project.diagnostic_summary(cx),
1192 DiagnosticSummary {
1193 error_count: 0,
1194 warning_count: 0,
1195 }
1196 );
1197 });
1198}
1199
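// A server may publish diagnostics with a buffer version we never sent; after a
// restart, the document should be reopened at version 0.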
1200#[gpui::test]
1201async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1202 init_test(cx);
1203
1204 let mut language = Language::new(
1205 LanguageConfig {
1206 path_suffixes: vec!["rs".to_string()],
1207 ..Default::default()
1208 },
1209 None,
1210 );
1211 let mut fake_servers = language
1212 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1213 name: "the-lsp",
1214 ..Default::default()
1215 }))
1216 .await;
1217
1218 let fs = FakeFs::new(cx.executor());
1219 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1220
1221 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1222 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1223
1224 let buffer = project
1225 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1226 .await
1227 .unwrap();
1228
1229 // Before restarting the server, report diagnostics with an unknown buffer version.
1230 let fake_server = fake_servers.next().await.unwrap();
1231 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1232 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1233 version: Some(10000),
1234 diagnostics: Vec::new(),
1235 });
1236 cx.executor().run_until_parked();
1237
1238 project.update(cx, |project, cx| {
1239 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1240 });
1241 let mut fake_server = fake_servers.next().await.unwrap();
1242 let notification = fake_server
1243 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1244 .await
1245 .text_document;
1246 assert_eq!(notification.version, 0);
1247}
1248
1249#[gpui::test]
1250async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1251 init_test(cx);
1252
1253 let mut rust = Language::new(
1254 LanguageConfig {
1255 name: Arc::from("Rust"),
1256 path_suffixes: vec!["rs".to_string()],
1257 ..Default::default()
1258 },
1259 None,
1260 );
1261 let mut fake_rust_servers = rust
1262 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1263 name: "rust-lsp",
1264 ..Default::default()
1265 }))
1266 .await;
1267 let mut js = Language::new(
1268 LanguageConfig {
1269 name: Arc::from("JavaScript"),
1270 path_suffixes: vec!["js".to_string()],
1271 ..Default::default()
1272 },
1273 None,
1274 );
1275 let mut fake_js_servers = js
1276 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1277 name: "js-lsp",
1278 ..Default::default()
1279 }))
1280 .await;
1281
1282 let fs = FakeFs::new(cx.executor());
1283 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1284 .await;
1285
1286 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1287 project.update(cx, |project, _| {
1288 project.languages.add(Arc::new(rust));
1289 project.languages.add(Arc::new(js));
1290 });
1291
1292 let _rs_buffer = project
1293 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1294 .await
1295 .unwrap();
1296 let _js_buffer = project
1297 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1298 .await
1299 .unwrap();
1300
1301 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1302 assert_eq!(
1303 fake_rust_server_1
1304 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1305 .await
1306 .text_document
1307 .uri
1308 .as_str(),
1309 "file:///dir/a.rs"
1310 );
1311
1312 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1313 assert_eq!(
1314 fake_js_server
1315 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1316 .await
1317 .text_document
1318 .uri
1319 .as_str(),
1320 "file:///dir/b.js"
1321 );
1322
    // Disable the Rust language server, ensuring only that server is stopped.
1324 cx.update(|cx| {
1325 cx.update_global(|settings: &mut SettingsStore, cx| {
1326 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1327 settings.languages.insert(
1328 Arc::from("Rust"),
1329 LanguageSettingsContent {
1330 enable_language_server: Some(false),
1331 ..Default::default()
1332 },
1333 );
1334 });
1335 })
1336 });
1337 fake_rust_server_1
1338 .receive_notification::<lsp::notification::Exit>()
1339 .await;
1340
1341 // Enable Rust and disable JavaScript language servers, ensuring that the
1342 // former gets started again and that the latter stops.
1343 cx.update(|cx| {
1344 cx.update_global(|settings: &mut SettingsStore, cx| {
1345 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1346 settings.languages.insert(
1347 Arc::from("Rust"),
1348 LanguageSettingsContent {
1349 enable_language_server: Some(true),
1350 ..Default::default()
1351 },
1352 );
1353 settings.languages.insert(
1354 Arc::from("JavaScript"),
1355 LanguageSettingsContent {
1356 enable_language_server: Some(false),
1357 ..Default::default()
1358 },
1359 );
1360 });
1361 })
1362 });
1363 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1364 assert_eq!(
1365 fake_rust_server_2
1366 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1367 .await
1368 .text_document
1369 .uri
1370 .as_str(),
1371 "file:///dir/a.rs"
1372 );
1373 fake_js_server
1374 .receive_notification::<lsp::notification::Exit>()
1375 .await;
1376}
1377
1378#[gpui::test(iterations = 3)]
1379async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1380 init_test(cx);
1381
1382 let mut language = Language::new(
1383 LanguageConfig {
1384 name: "Rust".into(),
1385 path_suffixes: vec!["rs".to_string()],
1386 ..Default::default()
1387 },
1388 Some(tree_sitter_rust::language()),
1389 );
1390 let mut fake_servers = language
1391 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1392 disk_based_diagnostics_sources: vec!["disk".into()],
1393 ..Default::default()
1394 }))
1395 .await;
1396
    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1406
1407 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1408 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1409
1410 let buffer = project
1411 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1412 .await
1413 .unwrap();
1414
1415 let mut fake_server = fake_servers.next().await.unwrap();
1416 let open_notification = fake_server
1417 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1418 .await;
1419
1420 // Edit the buffer, moving the content down
1421 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1422 let change_notification_1 = fake_server
1423 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1424 .await;
1425 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1426
1427 // Report some diagnostics for the initial version of the buffer
1428 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1429 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1430 version: Some(open_notification.text_document.version),
1431 diagnostics: vec![
1432 lsp::Diagnostic {
1433 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1434 severity: Some(DiagnosticSeverity::ERROR),
1435 message: "undefined variable 'A'".to_string(),
1436 source: Some("disk".to_string()),
1437 ..Default::default()
1438 },
1439 lsp::Diagnostic {
1440 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1441 severity: Some(DiagnosticSeverity::ERROR),
1442 message: "undefined variable 'BB'".to_string(),
1443 source: Some("disk".to_string()),
1444 ..Default::default()
1445 },
1446 lsp::Diagnostic {
1447 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1448 severity: Some(DiagnosticSeverity::ERROR),
1449 source: Some("disk".to_string()),
1450 message: "undefined variable 'CCC'".to_string(),
1451 ..Default::default()
1452 },
1453 ],
1454 });
1455
1456 // The diagnostics have moved down since they were created.
1457 cx.executor().run_until_parked();
1458 buffer.update(cx, |buffer, _| {
1459 assert_eq!(
1460 buffer
1461 .snapshot()
1462 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1463 .collect::<Vec<_>>(),
1464 &[
1465 DiagnosticEntry {
1466 range: Point::new(3, 9)..Point::new(3, 11),
1467 diagnostic: Diagnostic {
1468 source: Some("disk".into()),
1469 severity: DiagnosticSeverity::ERROR,
1470 message: "undefined variable 'BB'".to_string(),
1471 is_disk_based: true,
1472 group_id: 1,
1473 is_primary: true,
1474 ..Default::default()
1475 },
1476 },
1477 DiagnosticEntry {
1478 range: Point::new(4, 9)..Point::new(4, 12),
1479 diagnostic: Diagnostic {
1480 source: Some("disk".into()),
1481 severity: DiagnosticSeverity::ERROR,
1482 message: "undefined variable 'CCC'".to_string(),
1483 is_disk_based: true,
1484 group_id: 2,
1485 is_primary: true,
1486 ..Default::default()
1487 }
1488 }
1489 ]
1490 );
1491 assert_eq!(
1492 chunks_with_diagnostics(buffer, 0..buffer.len()),
1493 [
1494 ("\n\nfn a() { ".to_string(), None),
1495 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1496 (" }\nfn b() { ".to_string(), None),
1497 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1498 (" }\nfn c() { ".to_string(), None),
1499 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1500 (" }\n".to_string(), None),
1501 ]
1502 );
1503 assert_eq!(
1504 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1505 [
1506 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1507 (" }\nfn c() { ".to_string(), None),
1508 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1509 ]
1510 );
1511 });
1512
1513 // Ensure overlapping diagnostics are highlighted correctly.
1514 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1515 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1516 version: Some(open_notification.text_document.version),
1517 diagnostics: vec![
1518 lsp::Diagnostic {
1519 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1520 severity: Some(DiagnosticSeverity::ERROR),
1521 message: "undefined variable 'A'".to_string(),
1522 source: Some("disk".to_string()),
1523 ..Default::default()
1524 },
1525 lsp::Diagnostic {
1526 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1527 severity: Some(DiagnosticSeverity::WARNING),
1528 message: "unreachable statement".to_string(),
1529 source: Some("disk".to_string()),
1530 ..Default::default()
1531 },
1532 ],
1533 });
1534
1535 cx.executor().run_until_parked();
1536 buffer.update(cx, |buffer, _| {
1537 assert_eq!(
1538 buffer
1539 .snapshot()
1540 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1541 .collect::<Vec<_>>(),
1542 &[
1543 DiagnosticEntry {
1544 range: Point::new(2, 9)..Point::new(2, 12),
1545 diagnostic: Diagnostic {
1546 source: Some("disk".into()),
1547 severity: DiagnosticSeverity::WARNING,
1548 message: "unreachable statement".to_string(),
1549 is_disk_based: true,
1550 group_id: 4,
1551 is_primary: true,
1552 ..Default::default()
1553 }
1554 },
1555 DiagnosticEntry {
1556 range: Point::new(2, 9)..Point::new(2, 10),
1557 diagnostic: Diagnostic {
1558 source: Some("disk".into()),
1559 severity: DiagnosticSeverity::ERROR,
1560 message: "undefined variable 'A'".to_string(),
1561 is_disk_based: true,
1562 group_id: 3,
1563 is_primary: true,
1564 ..Default::default()
1565 },
1566 }
1567 ]
1568 );
1569 assert_eq!(
1570 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1571 [
1572 ("fn a() { ".to_string(), None),
1573 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1574 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1575 ("\n".to_string(), None),
1576 ]
1577 );
1578 assert_eq!(
1579 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1580 [
1581 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1582 ("\n".to_string(), None),
1583 ]
1584 );
1585 });
1586
1587 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1588 // changes since the last save.
1589 buffer.update(cx, |buffer, cx| {
1590 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1591 buffer.edit(
1592 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1593 None,
1594 cx,
1595 );
1596 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1597 });
1598 let change_notification_2 = fake_server
1599 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1600 .await;
1601 assert!(
1602 change_notification_2.text_document.version > change_notification_1.text_document.version
1603 );
1604
1605 // Handle out-of-order diagnostics
1606 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1607 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1608 version: Some(change_notification_2.text_document.version),
1609 diagnostics: vec![
1610 lsp::Diagnostic {
1611 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1612 severity: Some(DiagnosticSeverity::ERROR),
1613 message: "undefined variable 'BB'".to_string(),
1614 source: Some("disk".to_string()),
1615 ..Default::default()
1616 },
1617 lsp::Diagnostic {
1618 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1619 severity: Some(DiagnosticSeverity::WARNING),
1620 message: "undefined variable 'A'".to_string(),
1621 source: Some("disk".to_string()),
1622 ..Default::default()
1623 },
1624 ],
1625 });
1626
1627 cx.executor().run_until_parked();
1628 buffer.update(cx, |buffer, _| {
1629 assert_eq!(
1630 buffer
1631 .snapshot()
1632 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1633 .collect::<Vec<_>>(),
1634 &[
1635 DiagnosticEntry {
1636 range: Point::new(2, 21)..Point::new(2, 22),
1637 diagnostic: Diagnostic {
1638 source: Some("disk".into()),
1639 severity: DiagnosticSeverity::WARNING,
1640 message: "undefined variable 'A'".to_string(),
1641 is_disk_based: true,
1642 group_id: 6,
1643 is_primary: true,
1644 ..Default::default()
1645 }
1646 },
1647 DiagnosticEntry {
1648 range: Point::new(3, 9)..Point::new(3, 14),
1649 diagnostic: Diagnostic {
1650 source: Some("disk".into()),
1651 severity: DiagnosticSeverity::ERROR,
1652 message: "undefined variable 'BB'".to_string(),
1653 is_disk_based: true,
1654 group_id: 5,
1655 is_primary: true,
1656 ..Default::default()
1657 },
1658 }
1659 ]
1660 );
1661 });
1662}
1663
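// Empty (zero-width) diagnostic ranges should still produce a visible highlight by
// expanding to a neighboring character.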
1664#[gpui::test]
1665async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1666 init_test(cx);
1667
1668 let text = concat!(
1669 "let one = ;\n", //
1670 "let two = \n",
1671 "let three = 3;\n",
1672 );
1673
1674 let fs = FakeFs::new(cx.executor());
1675 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1676
1677 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1678 let buffer = project
1679 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1680 .await
1681 .unwrap();
1682
1683 project.update(cx, |project, cx| {
1684 project
1685 .update_buffer_diagnostics(
1686 &buffer,
1687 LanguageServerId(0),
1688 None,
1689 vec![
1690 DiagnosticEntry {
1691 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1692 diagnostic: Diagnostic {
1693 severity: DiagnosticSeverity::ERROR,
1694 message: "syntax error 1".to_string(),
1695 ..Default::default()
1696 },
1697 },
1698 DiagnosticEntry {
1699 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1700 diagnostic: Diagnostic {
1701 severity: DiagnosticSeverity::ERROR,
1702 message: "syntax error 2".to_string(),
1703 ..Default::default()
1704 },
1705 },
1706 ],
1707 cx,
1708 )
1709 .unwrap();
1710 });
1711
1712 // An empty range is extended forward to include the following character.
1713 // At the end of a line, an empty range is extended backward to include
1714 // the preceding character.
1715 buffer.update(cx, |buffer, _| {
1716 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1717 assert_eq!(
1718 chunks
1719 .iter()
1720 .map(|(s, d)| (s.as_str(), *d))
1721 .collect::<Vec<_>>(),
1722 &[
1723 ("let one = ", None),
1724 (";", Some(DiagnosticSeverity::ERROR)),
1725 ("\nlet two =", None),
1726 (" ", Some(DiagnosticSeverity::ERROR)),
1727 ("\nlet three = 3;\n", None)
1728 ]
1729 );
1730 });
1731}
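// Diagnostics reported by different language servers for the same path should be
// tracked independently and both counted in the project-wide summary.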
1732
1733#[gpui::test]
1734async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1735 init_test(cx);
1736
1737 let fs = FakeFs::new(cx.executor());
1738 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1739 .await;
1740
1741 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1742
1743 project.update(cx, |project, cx| {
1744 project
1745 .update_diagnostic_entries(
1746 LanguageServerId(0),
1747 Path::new("/dir/a.rs").to_owned(),
1748 None,
1749 vec![DiagnosticEntry {
1750 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1751 diagnostic: Diagnostic {
1752 severity: DiagnosticSeverity::ERROR,
1753 is_primary: true,
1754 message: "syntax error a1".to_string(),
1755 ..Default::default()
1756 },
1757 }],
1758 cx,
1759 )
1760 .unwrap();
1761 project
1762 .update_diagnostic_entries(
1763 LanguageServerId(1),
1764 Path::new("/dir/a.rs").to_owned(),
1765 None,
1766 vec![DiagnosticEntry {
1767 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1768 diagnostic: Diagnostic {
1769 severity: DiagnosticSeverity::ERROR,
1770 is_primary: true,
1771 message: "syntax error b1".to_string(),
1772 ..Default::default()
1773 },
1774 }],
1775 cx,
1776 )
1777 .unwrap();
1778
1779 assert_eq!(
1780 project.diagnostic_summary(cx),
1781 DiagnosticSummary {
1782 error_count: 2,
1783 warning_count: 0,
1784 }
1785 );
1786 });
1787}
1788
1789#[gpui::test]
1790async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1791 init_test(cx);
1792
1793 let mut language = Language::new(
1794 LanguageConfig {
1795 name: "Rust".into(),
1796 path_suffixes: vec!["rs".to_string()],
1797 ..Default::default()
1798 },
1799 Some(tree_sitter_rust::language()),
1800 );
1801 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1802
    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();
1815
1816 let fs = FakeFs::new(cx.executor());
1817 fs.insert_tree(
1818 "/dir",
1819 json!({
1820 "a.rs": text.clone(),
1821 }),
1822 )
1823 .await;
1824
1825 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1826 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1827 let buffer = project
1828 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1829 .await
1830 .unwrap();
1831
1832 let mut fake_server = fake_servers.next().await.unwrap();
1833 let lsp_document_version = fake_server
1834 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1835 .await
1836 .text_document
1837 .version;
1838
1839 // Simulate editing the buffer after the language server computes some edits.
1840 buffer.update(cx, |buffer, cx| {
1841 buffer.edit(
1842 [(
1843 Point::new(0, 0)..Point::new(0, 0),
1844 "// above first function\n",
1845 )],
1846 None,
1847 cx,
1848 );
1849 buffer.edit(
1850 [(
1851 Point::new(2, 0)..Point::new(2, 0),
1852 " // inside first function\n",
1853 )],
1854 None,
1855 cx,
1856 );
1857 buffer.edit(
1858 [(
1859 Point::new(6, 4)..Point::new(6, 4),
1860 "// inside second function ",
1861 )],
1862 None,
1863 cx,
1864 );
1865
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
1883 });
1884
1885 let edits = project
1886 .update(cx, |project, cx| {
1887 project.edits_from_lsp(
1888 &buffer,
1889 vec![
1890 // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                            "
                        .unindent(),
                    },
1900 // edit inside second function
1901 lsp::TextEdit {
1902 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1903 new_text: "00".into(),
1904 },
1905 // edit inside third function via two distinct edits
1906 lsp::TextEdit {
1907 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1908 new_text: "4000".into(),
1909 },
1910 lsp::TextEdit {
1911 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1912 new_text: "".into(),
1913 },
1914 ],
1915 LanguageServerId(0),
1916 Some(lsp_document_version),
1917 cx,
1918 )
1919 })
1920 .await
1921 .unwrap();
1922
1923 buffer.update(cx, |buffer, cx| {
1924 for (range, new_text) in edits {
1925 buffer.edit([(range, new_text)], None, cx);
1926 }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
1944 });
1945}
1946
1947#[gpui::test]
1948async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1949 init_test(cx);
1950
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();
1961
1962 let fs = FakeFs::new(cx.executor());
1963 fs.insert_tree(
1964 "/dir",
1965 json!({
1966 "a.rs": text.clone(),
1967 }),
1968 )
1969 .await;
1970
1971 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1972 let buffer = project
1973 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1974 .await
1975 .unwrap();
1976
1977 // Simulate the language server sending us a small edit in the form of a very large diff.
1978 // Rust-analyzer does this when performing a merge-imports code action.
1979 let edits = project
1980 .update(cx, |project, cx| {
1981 project.edits_from_lsp(
1982 &buffer,
1983 [
1984 // Replace the first use statement without editing the semicolon.
1985 lsp::TextEdit {
1986 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1987 new_text: "a::{b, c}".into(),
1988 },
1989 // Reinsert the remainder of the file between the semicolon and the final
1990 // newline of the file.
1991 lsp::TextEdit {
1992 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1993 new_text: "\n\n".into(),
1994 },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
2004 // Delete everything after the first newline of the file.
2005 lsp::TextEdit {
2006 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2007 new_text: "".into(),
2008 },
2009 ],
2010 LanguageServerId(0),
2011 None,
2012 cx,
2013 )
2014 })
2015 .await
2016 .unwrap();
2017
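    // Convert the LSP edits to buffer coordinates. The large diff should collapse
    // into just two minimal edits.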
2018 buffer.update(cx, |buffer, cx| {
2019 let edits = edits
2020 .into_iter()
2021 .map(|(range, text)| {
2022 (
2023 range.start.to_point(buffer)..range.end.to_point(buffer),
2024 text,
2025 )
2026 })
2027 .collect::<Vec<_>>();
2028
2029 assert_eq!(
2030 edits,
2031 [
2032 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2033 (Point::new(1, 0)..Point::new(2, 0), "".into())
2034 ]
2035 );
2036
2037 for (range, new_text) in edits {
2038 buffer.edit([(range, new_text)], None, cx);
2039 }
2040 assert_eq!(
2041 buffer.text(),
2042 "
2043 use a::{b, c};
2044
2045 fn f() {
2046 b();
2047 c();
2048 }
2049 "
2050 .unindent()
2051 );
2052 });
2053}
2054
2055#[gpui::test]
2056async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2057 init_test(cx);
2058
2059 let text = "
2060 use a::b;
2061 use a::c;
2062
2063 fn f() {
2064 b();
2065 c();
2066 }
2067 "
2068 .unindent();
2069
2070 let fs = FakeFs::new(cx.executor());
2071 fs.insert_tree(
2072 "/dir",
2073 json!({
2074 "a.rs": text.clone(),
2075 }),
2076 )
2077 .await;
2078
2079 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2080 let buffer = project
2081 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2082 .await
2083 .unwrap();
2084
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point to invalid locations.
2087 let edits = project
2088 .update(cx, |project, cx| {
2089 project.edits_from_lsp(
2090 &buffer,
2091 [
2092 lsp::TextEdit {
2093 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2094 new_text: "\n\n".into(),
2095 },
2096 lsp::TextEdit {
2097 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2098 new_text: "a::{b, c}".into(),
2099 },
2100 lsp::TextEdit {
2101 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2102 new_text: "".into(),
2103 },
2104 lsp::TextEdit {
2105 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2106 new_text: "
2107 fn f() {
2108 b();
2109 c();
2110 }"
2111 .unindent(),
2112 },
2113 ],
2114 LanguageServerId(0),
2115 None,
2116 cx,
2117 )
2118 })
2119 .await
2120 .unwrap();
2121
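    // Despite the out-of-order and invalid ranges, the edits should resolve to the
    // same two minimal edits as in the previous test.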
2122 buffer.update(cx, |buffer, cx| {
2123 let edits = edits
2124 .into_iter()
2125 .map(|(range, text)| {
2126 (
2127 range.start.to_point(buffer)..range.end.to_point(buffer),
2128 text,
2129 )
2130 })
2131 .collect::<Vec<_>>();
2132
2133 assert_eq!(
2134 edits,
2135 [
2136 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2137 (Point::new(1, 0)..Point::new(2, 0), "".into())
2138 ]
2139 );
2140
2141 for (range, new_text) in edits {
2142 buffer.edit([(range, new_text)], None, cx);
2143 }
2144 assert_eq!(
2145 buffer.text(),
2146 "
2147 use a::{b, c};
2148
2149 fn f() {
2150 b();
2151 c();
2152 }
2153 "
2154 .unindent()
2155 );
2156 });
2157}
2158
2159fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2160 buffer: &Buffer,
2161 range: Range<T>,
2162) -> Vec<(String, Option<DiagnosticSeverity>)> {
2163 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2164 for chunk in buffer.snapshot().chunks(range, true) {
2165 if chunks.last().map_or(false, |prev_chunk| {
2166 prev_chunk.1 == chunk.diagnostic_severity
2167 }) {
2168 chunks.last_mut().unwrap().0.push_str(chunk.text);
2169 } else {
2170 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2171 }
2172 }
2173 chunks
2174}
2175
2176#[gpui::test(iterations = 10)]
2177async fn test_definition(cx: &mut gpui::TestAppContext) {
2178 init_test(cx);
2179
2180 let mut language = Language::new(
2181 LanguageConfig {
2182 name: "Rust".into(),
2183 path_suffixes: vec!["rs".to_string()],
2184 ..Default::default()
2185 },
2186 Some(tree_sitter_rust::language()),
2187 );
2188 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2189
2190 let fs = FakeFs::new(cx.executor());
2191 fs.insert_tree(
2192 "/dir",
2193 json!({
2194 "a.rs": "const fn a() { A }",
2195 "b.rs": "const y: i32 = crate::a()",
2196 }),
2197 )
2198 .await;
2199
2200 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2201 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2202
2203 let buffer = project
2204 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2205 .await
2206 .unwrap();
2207
2208 let fake_server = fake_servers.next().await.unwrap();
2209 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2210 let params = params.text_document_position_params;
2211 assert_eq!(
2212 params.text_document.uri.to_file_path().unwrap(),
2213 Path::new("/dir/b.rs"),
2214 );
2215 assert_eq!(params.position, lsp::Position::new(0, 22));
2216
2217 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2218 lsp::Location::new(
2219 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2220 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2221 ),
2222 )))
2223 });
2224
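    // Request the definition of the symbol at offset 22 in b.rs.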
2225 let mut definitions = project
2226 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2227 .await
2228 .unwrap();
2229
2230 // Assert no new language server started
2231 cx.executor().run_until_parked();
2232 assert!(fake_servers.try_next().is_err());
2233
2234 assert_eq!(definitions.len(), 1);
2235 let definition = definitions.pop().unwrap();
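    // The definition points into a.rs, which gets added as a non-visible worktree.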
2236 cx.update(|cx| {
2237 let target_buffer = definition.target.buffer.read(cx);
2238 assert_eq!(
2239 target_buffer
2240 .file()
2241 .unwrap()
2242 .as_local()
2243 .unwrap()
2244 .abs_path(cx),
2245 Path::new("/dir/a.rs"),
2246 );
2247 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2248 assert_eq!(
2249 list_worktrees(&project, cx),
2250 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2251 );
2252
2253 drop(definition);
2254 });
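    // Dropping the definition releases the extra worktree for a.rs.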
2255 cx.update(|cx| {
2256 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2257 });
2258
2259 fn list_worktrees<'a>(
2260 project: &'a Model<Project>,
2261 cx: &'a AppContext,
2262 ) -> Vec<(&'a Path, bool)> {
2263 project
2264 .read(cx)
2265 .worktrees()
2266 .map(|worktree| {
2267 let worktree = worktree.read(cx);
2268 (
2269 worktree.as_local().unwrap().abs_path().as_ref(),
2270 worktree.is_visible(),
2271 )
2272 })
2273 .collect::<Vec<_>>()
2274 }
2275}
2276
2277#[gpui::test]
2278async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2279 init_test(cx);
2280
2281 let mut language = Language::new(
2282 LanguageConfig {
2283 name: "TypeScript".into(),
2284 path_suffixes: vec!["ts".to_string()],
2285 ..Default::default()
2286 },
2287 Some(tree_sitter_typescript::language_typescript()),
2288 );
2289 let mut fake_language_servers = language
2290 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2291 capabilities: lsp::ServerCapabilities {
2292 completion_provider: Some(lsp::CompletionOptions {
2293 trigger_characters: Some(vec![":".to_string()]),
2294 ..Default::default()
2295 }),
2296 ..Default::default()
2297 },
2298 ..Default::default()
2299 }))
2300 .await;
2301
2302 let fs = FakeFs::new(cx.executor());
2303 fs.insert_tree(
2304 "/dir",
2305 json!({
2306 "a.ts": "",
2307 }),
2308 )
2309 .await;
2310
2311 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2312 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2313 let buffer = project
2314 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2315 .await
2316 .unwrap();
2317
2318 let fake_server = fake_language_servers.next().await.unwrap();
2319
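    // The completion has no edit range, so its range is inferred from the word
    // preceding the cursor ("fqn").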
2320 let text = "let a = b.fqn";
2321 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2322 let completions = project.update(cx, |project, cx| {
2323 project.completions(&buffer, text.len(), cx)
2324 });
2325
2326 fake_server
2327 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2328 Ok(Some(lsp::CompletionResponse::Array(vec![
2329 lsp::CompletionItem {
2330 label: "fullyQualifiedName?".into(),
2331 insert_text: Some("fullyQualifiedName".into()),
2332 ..Default::default()
2333 },
2334 ])))
2335 })
2336 .next()
2337 .await;
2338 let completions = completions.await.unwrap();
2339 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2340 assert_eq!(completions.len(), 1);
2341 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2342 assert_eq!(
2343 completions[0].old_range.to_offset(&snapshot),
2344 text.len() - 3..text.len()
2345 );
2346
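    // Inside a string literal, the inferred range covers the partial word before
    // the cursor ("cmp").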
2347 let text = "let a = \"atoms/cmp\"";
2348 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2349 let completions = project.update(cx, |project, cx| {
2350 project.completions(&buffer, text.len() - 1, cx)
2351 });
2352
2353 fake_server
2354 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2355 Ok(Some(lsp::CompletionResponse::Array(vec![
2356 lsp::CompletionItem {
2357 label: "component".into(),
2358 ..Default::default()
2359 },
2360 ])))
2361 })
2362 .next()
2363 .await;
2364 let completions = completions.await.unwrap();
2365 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2366 assert_eq!(completions.len(), 1);
2367 assert_eq!(completions[0].new_text, "component");
2368 assert_eq!(
2369 completions[0].old_range.to_offset(&snapshot),
2370 text.len() - 4..text.len() - 1
2371 );
2372}
2373
2374#[gpui::test]
2375async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2376 init_test(cx);
2377
2378 let mut language = Language::new(
2379 LanguageConfig {
2380 name: "TypeScript".into(),
2381 path_suffixes: vec!["ts".to_string()],
2382 ..Default::default()
2383 },
2384 Some(tree_sitter_typescript::language_typescript()),
2385 );
2386 let mut fake_language_servers = language
2387 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2388 capabilities: lsp::ServerCapabilities {
2389 completion_provider: Some(lsp::CompletionOptions {
2390 trigger_characters: Some(vec![":".to_string()]),
2391 ..Default::default()
2392 }),
2393 ..Default::default()
2394 },
2395 ..Default::default()
2396 }))
2397 .await;
2398
2399 let fs = FakeFs::new(cx.executor());
2400 fs.insert_tree(
2401 "/dir",
2402 json!({
2403 "a.ts": "",
2404 }),
2405 )
2406 .await;
2407
2408 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2409 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2410 let buffer = project
2411 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2412 .await
2413 .unwrap();
2414
2415 let fake_server = fake_language_servers.next().await.unwrap();
2416
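    // Carriage returns in the server's insert text should be normalized to '\n'
    // in the completion's new text.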
2417 let text = "let a = b.fqn";
2418 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2419 let completions = project.update(cx, |project, cx| {
2420 project.completions(&buffer, text.len(), cx)
2421 });
2422
2423 fake_server
2424 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2425 Ok(Some(lsp::CompletionResponse::Array(vec![
2426 lsp::CompletionItem {
2427 label: "fullyQualifiedName?".into(),
2428 insert_text: Some("fully\rQualified\r\nName".into()),
2429 ..Default::default()
2430 },
2431 ])))
2432 })
2433 .next()
2434 .await;
2435 let completions = completions.await.unwrap();
2436 assert_eq!(completions.len(), 1);
2437 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2438}
2439
2440#[gpui::test(iterations = 10)]
2441async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2442 init_test(cx);
2443
2444 let mut language = Language::new(
2445 LanguageConfig {
2446 name: "TypeScript".into(),
2447 path_suffixes: vec!["ts".to_string()],
2448 ..Default::default()
2449 },
2450 None,
2451 );
2452 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2453
2454 let fs = FakeFs::new(cx.executor());
2455 fs.insert_tree(
2456 "/dir",
2457 json!({
2458 "a.ts": "a",
2459 }),
2460 )
2461 .await;
2462
2463 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2464 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2465 let buffer = project
2466 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2467 .await
2468 .unwrap();
2469
2470 let fake_server = fake_language_servers.next().await.unwrap();
2471
    // The language server returns code actions that contain commands, not edits.
2473 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2474 fake_server
2475 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2476 Ok(Some(vec![
2477 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2478 title: "The code action".into(),
2479 command: Some(lsp::Command {
2480 title: "The command".into(),
2481 command: "_the/command".into(),
2482 arguments: Some(vec![json!("the-argument")]),
2483 }),
2484 ..Default::default()
2485 }),
2486 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2487 title: "two".into(),
2488 ..Default::default()
2489 }),
2490 ]))
2491 })
2492 .next()
2493 .await;
2494
2495 let action = actions.await.unwrap()[0].clone();
2496 let apply = project.update(cx, |project, cx| {
2497 project.apply_code_action(buffer.clone(), action, true, cx)
2498 });
2499
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2502 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2503 |action, _| async move { Ok(action) },
2504 );
2505
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2508 fake_server
2509 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2510 let fake = fake_server.clone();
2511 move |params, _| {
2512 assert_eq!(params.command, "_the/command");
2513 let fake = fake.clone();
2514 async move {
2515 fake.server
2516 .request::<lsp::request::ApplyWorkspaceEdit>(
2517 lsp::ApplyWorkspaceEditParams {
2518 label: None,
2519 edit: lsp::WorkspaceEdit {
2520 changes: Some(
2521 [(
2522 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2523 vec![lsp::TextEdit {
2524 range: lsp::Range::new(
2525 lsp::Position::new(0, 0),
2526 lsp::Position::new(0, 0),
2527 ),
2528 new_text: "X".into(),
2529 }],
2530 )]
2531 .into_iter()
2532 .collect(),
2533 ),
2534 ..Default::default()
2535 },
2536 },
2537 )
2538 .await
2539 .unwrap();
2540 Ok(Some(json!(null)))
2541 }
2542 }
2543 })
2544 .next()
2545 .await;
2546
2547 // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2549 let transaction = apply.await.unwrap();
2550 assert!(transaction.0.contains_key(&buffer));
2551 buffer.update(cx, |buffer, cx| {
2552 assert_eq!(buffer.text(), "Xa");
2553 buffer.undo(cx);
2554 assert_eq!(buffer.text(), "a");
2555 });
2556}
2557
2558#[gpui::test(iterations = 10)]
2559async fn test_save_file(cx: &mut gpui::TestAppContext) {
2560 init_test(cx);
2561
2562 let fs = FakeFs::new(cx.executor());
2563 fs.insert_tree(
2564 "/dir",
2565 json!({
2566 "file1": "the old contents",
2567 }),
2568 )
2569 .await;
2570
2571 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2572 let buffer = project
2573 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2574 .await
2575 .unwrap();
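    // Edit the buffer so that it no longer matches the file on disk.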
2576 buffer.update(cx, |buffer, cx| {
2577 assert_eq!(buffer.text(), "the old contents");
2578 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2579 });
2580
2581 project
2582 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2583 .await
2584 .unwrap();
2585
2586 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2587 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2588}
2589
2590#[gpui::test(iterations = 30)]
2591async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2592 init_test(cx);
2593
2594 let fs = FakeFs::new(cx.executor().clone());
2595 fs.insert_tree(
2596 "/dir",
2597 json!({
2598 "file1": "the original contents",
2599 }),
2600 )
2601 .await;
2602
2603 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2604 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2605 let buffer = project
2606 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2607 .await
2608 .unwrap();
2609
2610 // Simulate buffer diffs being slow, so that they don't complete before
2611 // the next file change occurs.
2612 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2613
2614 // Change the buffer's file on disk, and then wait for the file change
2615 // to be detected by the worktree, so that the buffer starts reloading.
2616 fs.save(
2617 "/dir/file1".as_ref(),
2618 &"the first contents".into(),
2619 Default::default(),
2620 )
2621 .await
2622 .unwrap();
2623 worktree.next_event(cx);
2624
2625 // Change the buffer's file again. Depending on the random seed, the
2626 // previous file change may still be in progress.
2627 fs.save(
2628 "/dir/file1".as_ref(),
2629 &"the second contents".into(),
2630 Default::default(),
2631 )
2632 .await
2633 .unwrap();
2634 worktree.next_event(cx);
2635
2636 cx.executor().run_until_parked();
2637 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2638 buffer.read_with(cx, |buffer, _| {
2639 assert_eq!(buffer.text(), on_disk_text);
2640 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2642 });
2643}
2644
2645#[gpui::test(iterations = 30)]
2646async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2647 init_test(cx);
2648
2649 let fs = FakeFs::new(cx.executor().clone());
2650 fs.insert_tree(
2651 "/dir",
2652 json!({
2653 "file1": "the original contents",
2654 }),
2655 )
2656 .await;
2657
2658 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2659 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2660 let buffer = project
2661 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2662 .await
2663 .unwrap();
2664
2665 // Simulate buffer diffs being slow, so that they don't complete before
2666 // the next file change occurs.
2667 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2668
2669 // Change the buffer's file on disk, and then wait for the file change
2670 // to be detected by the worktree, so that the buffer starts reloading.
2671 fs.save(
2672 "/dir/file1".as_ref(),
2673 &"the first contents".into(),
2674 Default::default(),
2675 )
2676 .await
2677 .unwrap();
2678 worktree.next_event(cx);
2679
2680 cx.executor()
2681 .spawn(cx.executor().simulate_random_delay())
2682 .await;
2683
    // Perform a no-op edit, causing the buffer's version to increase.
2685 buffer.update(cx, |buffer, cx| {
2686 buffer.edit([(0..0, " ")], None, cx);
2687 buffer.undo(cx);
2688 });
2689
2690 cx.executor().run_until_parked();
2691 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2692 buffer.read_with(cx, |buffer, _| {
2693 let buffer_text = buffer.text();
2694 if buffer_text == on_disk_text {
2695 assert!(
2696 !buffer.is_dirty() && !buffer.has_conflict(),
2697 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2698 );
        } else {
            // If the file change occurred while the buffer was processing the first
            // change, the buffer will be in a conflicting state.
2703 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2705 }
2706 });
2707}
2708
2709#[gpui::test]
2710async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2711 init_test(cx);
2712
2713 let fs = FakeFs::new(cx.executor());
2714 fs.insert_tree(
2715 "/dir",
2716 json!({
2717 "file1": "the old contents",
2718 }),
2719 )
2720 .await;
2721
2722 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2723 let buffer = project
2724 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2725 .await
2726 .unwrap();
2727 buffer.update(cx, |buffer, cx| {
2728 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2729 });
2730
2731 project
2732 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2733 .await
2734 .unwrap();
2735
2736 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2737 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2738}
2739
2740#[gpui::test]
2741async fn test_save_as(cx: &mut gpui::TestAppContext) {
2742 init_test(cx);
2743
2744 let fs = FakeFs::new(cx.executor());
2745 fs.insert_tree("/dir", json!({})).await;
2746
2747 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2748
2749 let languages = project.update(cx, |project, _| project.languages().clone());
2750 languages.register(
2751 "/some/path",
2752 LanguageConfig {
2753 name: "Rust".into(),
2754 path_suffixes: vec!["rs".into()],
2755 ..Default::default()
2756 },
2757 tree_sitter_rust::language(),
2758 vec![],
2759 |_| Default::default(),
2760 );
2761
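    // Create an untitled buffer and edit it; it uses the Plain Text language until
    // it is saved under a path.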
2762 let buffer = project.update(cx, |project, cx| {
2763 project.create_buffer("", None, cx).unwrap()
2764 });
2765 buffer.update(cx, |buffer, cx| {
2766 buffer.edit([(0..0, "abc")], None, cx);
2767 assert!(buffer.is_dirty());
2768 assert!(!buffer.has_conflict());
2769 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2770 });
2771 project
2772 .update(cx, |project, cx| {
2773 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2774 })
2775 .await
2776 .unwrap();
2777 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2778
2779 cx.executor().run_until_parked();
2780 buffer.update(cx, |buffer, cx| {
2781 assert_eq!(
2782 buffer.file().unwrap().full_path(cx),
2783 Path::new("dir/file1.rs")
2784 );
2785 assert!(!buffer.is_dirty());
2786 assert!(!buffer.has_conflict());
2787 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2788 });
2789
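    // Opening the newly saved path returns the same buffer.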
2790 let opened_buffer = project
2791 .update(cx, |project, cx| {
2792 project.open_local_buffer("/dir/file1.rs", cx)
2793 })
2794 .await
2795 .unwrap();
2796 assert_eq!(opened_buffer, buffer);
2797}
2798
2799#[gpui::test(retries = 5)]
2800async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2801 init_test(cx);
2802 cx.executor().allow_parking();
2803
2804 let dir = temp_tree(json!({
2805 "a": {
2806 "file1": "",
2807 "file2": "",
2808 "file3": "",
2809 },
2810 "b": {
2811 "c": {
2812 "file4": "",
2813 "file5": "",
2814 }
2815 }
2816 }));
2817
2818 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2819 let rpc = project.update(cx, |p, _| p.client.clone());
2820
2821 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2822 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2823 async move { buffer.await.unwrap() }
2824 };
2825 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2826 project.update(cx, |project, cx| {
2827 let tree = project.worktrees().next().unwrap();
2828 tree.read(cx)
2829 .entry_for_path(path)
2830 .unwrap_or_else(|| panic!("no entry for path {}", path))
2831 .id
2832 })
2833 };
2834
2835 let buffer2 = buffer_for_path("a/file2", cx).await;
2836 let buffer3 = buffer_for_path("a/file3", cx).await;
2837 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2838 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2839
2840 let file2_id = id_for_path("a/file2", cx);
2841 let file3_id = id_for_path("a/file3", cx);
2842 let file4_id = id_for_path("b/c/file4", cx);
2843
2844 // Create a remote copy of this worktree.
2845 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2846
2847 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2848
2849 let updates = Arc::new(Mutex::new(Vec::new()));
2850 tree.update(cx, |tree, cx| {
2851 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2852 let updates = updates.clone();
2853 move |update| {
2854 updates.lock().push(update);
2855 async { true }
2856 }
2857 });
2858 });
2859
2860 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2861
2862 cx.executor().run_until_parked();
2863
2864 cx.update(|cx| {
2865 assert!(!buffer2.read(cx).is_dirty());
2866 assert!(!buffer3.read(cx).is_dirty());
2867 assert!(!buffer4.read(cx).is_dirty());
2868 assert!(!buffer5.read(cx).is_dirty());
2869 });
2870
2871 // Rename and delete files and directories.
2872 tree.flush_fs_events(cx).await;
2873 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2874 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2875 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2876 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2877 tree.flush_fs_events(cx).await;
2878
2879 let expected_paths = vec![
2880 "a",
2881 "a/file1",
2882 "a/file2.new",
2883 "b",
2884 "d",
2885 "d/file3",
2886 "d/file4",
2887 ];
2888
2889 cx.update(|app| {
2890 assert_eq!(
2891 tree.read(app)
2892 .paths()
2893 .map(|p| p.to_str().unwrap())
2894 .collect::<Vec<_>>(),
2895 expected_paths
2896 );
2897 });
2898
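    // The renamed and moved entries keep their original entry ids.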
2899 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2900 assert_eq!(id_for_path("d/file3", cx), file3_id);
2901 assert_eq!(id_for_path("d/file4", cx), file4_id);
2902
2903 cx.update(|cx| {
2904 assert_eq!(
2905 buffer2.read(cx).file().unwrap().path().as_ref(),
2906 Path::new("a/file2.new")
2907 );
2908 assert_eq!(
2909 buffer3.read(cx).file().unwrap().path().as_ref(),
2910 Path::new("d/file3")
2911 );
2912 assert_eq!(
2913 buffer4.read(cx).file().unwrap().path().as_ref(),
2914 Path::new("d/file4")
2915 );
2916 assert_eq!(
2917 buffer5.read(cx).file().unwrap().path().as_ref(),
2918 Path::new("b/c/file5")
2919 );
2920
2921 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2922 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2923 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2924 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2925 });
2926
2927 // Update the remote worktree. Check that it becomes consistent with the
2928 // local worktree.
2929 cx.executor().run_until_parked();
2930
2931 remote.update(cx, |remote, _| {
2932 for update in updates.lock().drain(..) {
2933 remote.as_remote_mut().unwrap().update_from_remote(update);
2934 }
2935 });
2936 cx.executor().run_until_parked();
2937 remote.update(cx, |remote, _| {
2938 assert_eq!(
2939 remote
2940 .paths()
2941 .map(|p| p.to_str().unwrap())
2942 .collect::<Vec<_>>(),
2943 expected_paths
2944 );
2945 });
2946}
2947
2948#[gpui::test(iterations = 10)]
2949async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2950 init_test(cx);
2951
2952 let fs = FakeFs::new(cx.executor());
2953 fs.insert_tree(
2954 "/dir",
2955 json!({
2956 "a": {
2957 "file1": "",
2958 }
2959 }),
2960 )
2961 .await;
2962
2963 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2964 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2965 let tree_id = tree.update(cx, |tree, _| tree.id());
2966
2967 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2968 project.update(cx, |project, cx| {
2969 let tree = project.worktrees().next().unwrap();
2970 tree.read(cx)
2971 .entry_for_path(path)
2972 .unwrap_or_else(|| panic!("no entry for path {}", path))
2973 .id
2974 })
2975 };
2976
2977 let dir_id = id_for_path("a", cx);
2978 let file_id = id_for_path("a/file1", cx);
2979 let buffer = project
2980 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2981 .await
2982 .unwrap();
2983 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2984
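    // Rename the parent directory. The entry ids and the open buffer are preserved.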
2985 project
2986 .update(cx, |project, cx| {
2987 project.rename_entry(dir_id, Path::new("b"), cx)
2988 })
2989 .unwrap()
2990 .await
2991 .unwrap();
2992 cx.executor().run_until_parked();
2993
2994 assert_eq!(id_for_path("b", cx), dir_id);
2995 assert_eq!(id_for_path("b/file1", cx), file_id);
2996 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2997}
2998
2999#[gpui::test]
3000async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3001 init_test(cx);
3002
3003 let fs = FakeFs::new(cx.executor());
3004 fs.insert_tree(
3005 "/dir",
3006 json!({
3007 "a.txt": "a-contents",
3008 "b.txt": "b-contents",
3009 }),
3010 )
3011 .await;
3012
3013 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3014
3015 // Spawn multiple tasks to open paths, repeating some paths.
3016 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3017 (
3018 p.open_local_buffer("/dir/a.txt", cx),
3019 p.open_local_buffer("/dir/b.txt", cx),
3020 p.open_local_buffer("/dir/a.txt", cx),
3021 )
3022 });
3023
3024 let buffer_a_1 = buffer_a_1.await.unwrap();
3025 let buffer_a_2 = buffer_a_2.await.unwrap();
3026 let buffer_b = buffer_b.await.unwrap();
3027 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3028 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3029
3030 // There is only one buffer per path.
3031 let buffer_a_id = buffer_a_1.entity_id();
3032 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3033
3034 // Open the same path again while it is still open.
3035 drop(buffer_a_1);
3036 let buffer_a_3 = project
3037 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3038 .await
3039 .unwrap();
3040
3041 // There's still only one buffer per path.
3042 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3043}
3044
3045#[gpui::test]
3046async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3047 init_test(cx);
3048
3049 let fs = FakeFs::new(cx.executor());
3050 fs.insert_tree(
3051 "/dir",
3052 json!({
3053 "file1": "abc",
3054 "file2": "def",
3055 "file3": "ghi",
3056 }),
3057 )
3058 .await;
3059
3060 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3061
3062 let buffer1 = project
3063 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3064 .await
3065 .unwrap();
3066 let events = Arc::new(Mutex::new(Vec::new()));
3067
3068 // initially, the buffer isn't dirty.
3069 buffer1.update(cx, |buffer, cx| {
3070 cx.subscribe(&buffer1, {
3071 let events = events.clone();
3072 move |_, _, event, _| match event {
3073 BufferEvent::Operation(_) => {}
3074 _ => events.lock().push(event.clone()),
3075 }
3076 })
3077 .detach();
3078
3079 assert!(!buffer.is_dirty());
3080 assert!(events.lock().is_empty());
3081
3082 buffer.edit([(1..2, "")], None, cx);
3083 });
3084
3085 // after the first edit, the buffer is dirty, and emits a dirtied event.
3086 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
3088 assert!(buffer.is_dirty());
3089 assert_eq!(
3090 *events.lock(),
3091 &[language::Event::Edited, language::Event::DirtyChanged]
3092 );
3093 events.lock().clear();
3094 buffer.did_save(
3095 buffer.version(),
3096 buffer.as_rope().fingerprint(),
3097 buffer.file().unwrap().mtime(),
3098 cx,
3099 );
3100 });
3101
3102 // after saving, the buffer is not dirty, and emits a saved event.
3103 buffer1.update(cx, |buffer, cx| {
3104 assert!(!buffer.is_dirty());
3105 assert_eq!(*events.lock(), &[language::Event::Saved]);
3106 events.lock().clear();
3107
3108 buffer.edit([(1..1, "B")], None, cx);
3109 buffer.edit([(2..2, "D")], None, cx);
3110 });
3111
3112 // after editing again, the buffer is dirty, and emits another dirty event.
3113 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
3115 assert!(buffer.is_dirty());
3116 assert_eq!(
3117 *events.lock(),
3118 &[
3119 language::Event::Edited,
3120 language::Event::DirtyChanged,
3121 language::Event::Edited,
3122 ],
3123 );
3124 events.lock().clear();
3125
3126 // After restoring the buffer to its previously-saved state,
3127 // the buffer is not considered dirty anymore.
3128 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
3130 assert!(!buffer.is_dirty());
3131 });
3132
3133 assert_eq!(
3134 *events.lock(),
3135 &[language::Event::Edited, language::Event::DirtyChanged]
3136 );
3137
3138 // When a file is deleted, the buffer is considered dirty.
3139 let events = Arc::new(Mutex::new(Vec::new()));
3140 let buffer2 = project
3141 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3142 .await
3143 .unwrap();
3144 buffer2.update(cx, |_, cx| {
3145 cx.subscribe(&buffer2, {
3146 let events = events.clone();
3147 move |_, _, event, _| events.lock().push(event.clone())
3148 })
3149 .detach();
3150 });
3151
3152 fs.remove_file("/dir/file2".as_ref(), Default::default())
3153 .await
3154 .unwrap();
3155 cx.executor().run_until_parked();
3156 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3157 assert_eq!(
3158 *events.lock(),
3159 &[
3160 language::Event::DirtyChanged,
3161 language::Event::FileHandleChanged
3162 ]
3163 );
3164
    // When a file is deleted while the buffer is already dirty, we don't emit
    // another DirtyChanged event.
3166 let events = Arc::new(Mutex::new(Vec::new()));
3167 let buffer3 = project
3168 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3169 .await
3170 .unwrap();
3171 buffer3.update(cx, |_, cx| {
3172 cx.subscribe(&buffer3, {
3173 let events = events.clone();
3174 move |_, _, event, _| events.lock().push(event.clone())
3175 })
3176 .detach();
3177 });
3178
3179 buffer3.update(cx, |buffer, cx| {
3180 buffer.edit([(0..0, "x")], None, cx);
3181 });
3182 events.lock().clear();
3183 fs.remove_file("/dir/file3".as_ref(), Default::default())
3184 .await
3185 .unwrap();
3186 cx.executor().run_until_parked();
3187 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3188 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3189}
3190
3191#[gpui::test]
3192async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3193 init_test(cx);
3194
3195 let initial_contents = "aaa\nbbbbb\nc\n";
3196 let fs = FakeFs::new(cx.executor());
3197 fs.insert_tree(
3198 "/dir",
3199 json!({
3200 "the-file": initial_contents,
3201 }),
3202 )
3203 .await;
3204 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3205 let buffer = project
3206 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3207 .await
3208 .unwrap();
3209
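    // Place an anchor within each of the first three lines so we can check how
    // the anchors move when the file is reloaded.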
3210 let anchors = (0..3)
3211 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3212 .collect::<Vec<_>>();
3213
3214 // Change the file on disk, adding two new lines of text, and removing
3215 // one line.
3216 buffer.update(cx, |buffer, _| {
3217 assert!(!buffer.is_dirty());
3218 assert!(!buffer.has_conflict());
3219 });
3220 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3221 fs.save(
3222 "/dir/the-file".as_ref(),
3223 &new_contents.into(),
3224 LineEnding::Unix,
3225 )
3226 .await
3227 .unwrap();
3228
3229 // Because the buffer was not modified, it is reloaded from disk. Its
3230 // contents are edited according to the diff between the old and new
3231 // file contents.
3232 cx.executor().run_until_parked();
3233 buffer.update(cx, |buffer, _| {
3234 assert_eq!(buffer.text(), new_contents);
3235 assert!(!buffer.is_dirty());
3236 assert!(!buffer.has_conflict());
3237
3238 let anchor_positions = anchors
3239 .iter()
3240 .map(|anchor| anchor.to_point(&*buffer))
3241 .collect::<Vec<_>>();
3242 assert_eq!(
3243 anchor_positions,
3244 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3245 );
3246 });
3247
3248 // Modify the buffer
3249 buffer.update(cx, |buffer, cx| {
3250 buffer.edit([(0..0, " ")], None, cx);
3251 assert!(buffer.is_dirty());
3252 assert!(!buffer.has_conflict());
3253 });
3254
3255 // Change the file on disk again, adding blank lines to the beginning.
3256 fs.save(
3257 "/dir/the-file".as_ref(),
3258 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3259 LineEnding::Unix,
3260 )
3261 .await
3262 .unwrap();
3263
3264 // Because the buffer is modified, it doesn't reload from disk, but is
3265 // marked as having a conflict.
3266 cx.executor().run_until_parked();
3267 buffer.update(cx, |buffer, _| {
3268 assert!(buffer.has_conflict());
3269 });
3270}
3271
3272#[gpui::test]
3273async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3274 init_test(cx);
3275
3276 let fs = FakeFs::new(cx.executor());
3277 fs.insert_tree(
3278 "/dir",
3279 json!({
3280 "file1": "a\nb\nc\n",
3281 "file2": "one\r\ntwo\r\nthree\r\n",
3282 }),
3283 )
3284 .await;
3285
3286 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3287 let buffer1 = project
3288 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3289 .await
3290 .unwrap();
3291 let buffer2 = project
3292 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3293 .await
3294 .unwrap();
3295
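    // Line endings are normalized to '\n' in the buffer, but the original style
    // is remembered for each file.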
3296 buffer1.update(cx, |buffer, _| {
3297 assert_eq!(buffer.text(), "a\nb\nc\n");
3298 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3299 });
3300 buffer2.update(cx, |buffer, _| {
3301 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3302 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3303 });
3304
    // Change a file's line endings on disk from Unix to Windows. The buffer's
3306 // state updates correctly.
3307 fs.save(
3308 "/dir/file1".as_ref(),
3309 &"aaa\nb\nc\n".into(),
3310 LineEnding::Windows,
3311 )
3312 .await
3313 .unwrap();
3314 cx.executor().run_until_parked();
3315 buffer1.update(cx, |buffer, _| {
3316 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3317 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3318 });
3319
    // Save a file with Windows line endings. The file is written correctly.
3321 buffer2.update(cx, |buffer, cx| {
3322 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3323 });
3324 project
3325 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3326 .await
3327 .unwrap();
3328 assert_eq!(
3329 fs.load("/dir/file2".as_ref()).await.unwrap(),
3330 "one\r\ntwo\r\nthree\r\nfour\r\n",
3331 );
3332}
3333
3334#[gpui::test]
3335async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3336 init_test(cx);
3337
3338 let fs = FakeFs::new(cx.executor());
3339 fs.insert_tree(
3340 "/the-dir",
3341 json!({
3342 "a.rs": "
3343 fn foo(mut v: Vec<usize>) {
3344 for x in &v {
3345 v.push(1);
3346 }
3347 }
3348 "
3349 .unindent(),
3350 }),
3351 )
3352 .await;
3353
3354 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3355 let buffer = project
3356 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3357 .await
3358 .unwrap();
3359
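    // Publish diagnostics in which the hints reference their primary diagnostics
    // via relatedInformation.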
3360 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3361 let message = lsp::PublishDiagnosticsParams {
3362 uri: buffer_uri.clone(),
3363 diagnostics: vec![
3364 lsp::Diagnostic {
3365 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3366 severity: Some(DiagnosticSeverity::WARNING),
3367 message: "error 1".to_string(),
3368 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3369 location: lsp::Location {
3370 uri: buffer_uri.clone(),
3371 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3372 },
3373 message: "error 1 hint 1".to_string(),
3374 }]),
3375 ..Default::default()
3376 },
3377 lsp::Diagnostic {
3378 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3379 severity: Some(DiagnosticSeverity::HINT),
3380 message: "error 1 hint 1".to_string(),
3381 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3382 location: lsp::Location {
3383 uri: buffer_uri.clone(),
3384 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3385 },
3386 message: "original diagnostic".to_string(),
3387 }]),
3388 ..Default::default()
3389 },
3390 lsp::Diagnostic {
3391 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3392 severity: Some(DiagnosticSeverity::ERROR),
3393 message: "error 2".to_string(),
3394 related_information: Some(vec![
3395 lsp::DiagnosticRelatedInformation {
3396 location: lsp::Location {
3397 uri: buffer_uri.clone(),
3398 range: lsp::Range::new(
3399 lsp::Position::new(1, 13),
3400 lsp::Position::new(1, 15),
3401 ),
3402 },
3403 message: "error 2 hint 1".to_string(),
3404 },
3405 lsp::DiagnosticRelatedInformation {
3406 location: lsp::Location {
3407 uri: buffer_uri.clone(),
3408 range: lsp::Range::new(
3409 lsp::Position::new(1, 13),
3410 lsp::Position::new(1, 15),
3411 ),
3412 },
3413 message: "error 2 hint 2".to_string(),
3414 },
3415 ]),
3416 ..Default::default()
3417 },
3418 lsp::Diagnostic {
3419 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3420 severity: Some(DiagnosticSeverity::HINT),
3421 message: "error 2 hint 1".to_string(),
3422 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3423 location: lsp::Location {
3424 uri: buffer_uri.clone(),
3425 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3426 },
3427 message: "original diagnostic".to_string(),
3428 }]),
3429 ..Default::default()
3430 },
3431 lsp::Diagnostic {
3432 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3433 severity: Some(DiagnosticSeverity::HINT),
3434 message: "error 2 hint 2".to_string(),
3435 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3436 location: lsp::Location {
3437 uri: buffer_uri,
3438 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3439 },
3440 message: "original diagnostic".to_string(),
3441 }]),
3442 ..Default::default()
3443 },
3444 ],
3445 version: None,
3446 };
3447
3448 project
3449 .update(cx, |p, cx| {
3450 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3451 })
3452 .unwrap();
3453 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3454
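    // Related diagnostics are grouped together, and each group contains exactly
    // one primary entry.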
3455 assert_eq!(
3456 buffer
3457 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3458 .collect::<Vec<_>>(),
3459 &[
3460 DiagnosticEntry {
3461 range: Point::new(1, 8)..Point::new(1, 9),
3462 diagnostic: Diagnostic {
3463 severity: DiagnosticSeverity::WARNING,
3464 message: "error 1".to_string(),
3465 group_id: 1,
3466 is_primary: true,
3467 ..Default::default()
3468 }
3469 },
3470 DiagnosticEntry {
3471 range: Point::new(1, 8)..Point::new(1, 9),
3472 diagnostic: Diagnostic {
3473 severity: DiagnosticSeverity::HINT,
3474 message: "error 1 hint 1".to_string(),
3475 group_id: 1,
3476 is_primary: false,
3477 ..Default::default()
3478 }
3479 },
3480 DiagnosticEntry {
3481 range: Point::new(1, 13)..Point::new(1, 15),
3482 diagnostic: Diagnostic {
3483 severity: DiagnosticSeverity::HINT,
3484 message: "error 2 hint 1".to_string(),
3485 group_id: 0,
3486 is_primary: false,
3487 ..Default::default()
3488 }
3489 },
3490 DiagnosticEntry {
3491 range: Point::new(1, 13)..Point::new(1, 15),
3492 diagnostic: Diagnostic {
3493 severity: DiagnosticSeverity::HINT,
3494 message: "error 2 hint 2".to_string(),
3495 group_id: 0,
3496 is_primary: false,
3497 ..Default::default()
3498 }
3499 },
3500 DiagnosticEntry {
3501 range: Point::new(2, 8)..Point::new(2, 17),
3502 diagnostic: Diagnostic {
3503 severity: DiagnosticSeverity::ERROR,
3504 message: "error 2".to_string(),
3505 group_id: 0,
3506 is_primary: true,
3507 ..Default::default()
3508 }
3509 }
3510 ]
3511 );
3512
3513 assert_eq!(
3514 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3515 &[
3516 DiagnosticEntry {
3517 range: Point::new(1, 13)..Point::new(1, 15),
3518 diagnostic: Diagnostic {
3519 severity: DiagnosticSeverity::HINT,
3520 message: "error 2 hint 1".to_string(),
3521 group_id: 0,
3522 is_primary: false,
3523 ..Default::default()
3524 }
3525 },
3526 DiagnosticEntry {
3527 range: Point::new(1, 13)..Point::new(1, 15),
3528 diagnostic: Diagnostic {
3529 severity: DiagnosticSeverity::HINT,
3530 message: "error 2 hint 2".to_string(),
3531 group_id: 0,
3532 is_primary: false,
3533 ..Default::default()
3534 }
3535 },
3536 DiagnosticEntry {
3537 range: Point::new(2, 8)..Point::new(2, 17),
3538 diagnostic: Diagnostic {
3539 severity: DiagnosticSeverity::ERROR,
3540 message: "error 2".to_string(),
3541 group_id: 0,
3542 is_primary: true,
3543 ..Default::default()
3544 }
3545 }
3546 ]
3547 );
3548
3549 assert_eq!(
3550 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3551 &[
3552 DiagnosticEntry {
3553 range: Point::new(1, 8)..Point::new(1, 9),
3554 diagnostic: Diagnostic {
3555 severity: DiagnosticSeverity::WARNING,
3556 message: "error 1".to_string(),
3557 group_id: 1,
3558 is_primary: true,
3559 ..Default::default()
3560 }
3561 },
3562 DiagnosticEntry {
3563 range: Point::new(1, 8)..Point::new(1, 9),
3564 diagnostic: Diagnostic {
3565 severity: DiagnosticSeverity::HINT,
3566 message: "error 1 hint 1".to_string(),
3567 group_id: 1,
3568 is_primary: false,
3569 ..Default::default()
3570 }
3571 },
3572 ]
3573 );
3574}
3575
3576#[gpui::test]
3577async fn test_rename(cx: &mut gpui::TestAppContext) {
3578 init_test(cx);
3579
3580 let mut language = Language::new(
3581 LanguageConfig {
3582 name: "Rust".into(),
3583 path_suffixes: vec!["rs".to_string()],
3584 ..Default::default()
3585 },
3586 Some(tree_sitter_rust::language()),
3587 );
3588 let mut fake_servers = language
3589 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3590 capabilities: lsp::ServerCapabilities {
3591 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3592 prepare_provider: Some(true),
3593 work_done_progress_options: Default::default(),
3594 })),
3595 ..Default::default()
3596 },
3597 ..Default::default()
3598 }))
3599 .await;
3600
3601 let fs = FakeFs::new(cx.executor());
3602 fs.insert_tree(
3603 "/dir",
3604 json!({
3605 "one.rs": "const ONE: usize = 1;",
3606 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3607 }),
3608 )
3609 .await;
3610
3611 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3612 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3613 let buffer = project
3614 .update(cx, |project, cx| {
3615 project.open_local_buffer("/dir/one.rs", cx)
3616 })
3617 .await
3618 .unwrap();
3619
3620 let fake_server = fake_servers.next().await.unwrap();
3621
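    // Prepare a rename at offset 7; the server responds with the range of the
    // symbol `ONE`.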
3622 let response = project.update(cx, |project, cx| {
3623 project.prepare_rename(buffer.clone(), 7, cx)
3624 });
3625 fake_server
3626 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3627 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3628 assert_eq!(params.position, lsp::Position::new(0, 7));
3629 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3630 lsp::Position::new(0, 6),
3631 lsp::Position::new(0, 9),
3632 ))))
3633 })
3634 .next()
3635 .await
3636 .unwrap();
3637 let range = response.await.unwrap().unwrap();
3638 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3639 assert_eq!(range, 6..9);
3640
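    // Perform the rename. The server's workspace edit touches both one.rs and two.rs.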
3641 let response = project.update(cx, |project, cx| {
3642 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3643 });
3644 fake_server
3645 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3646 assert_eq!(
3647 params.text_document_position.text_document.uri.as_str(),
3648 "file:///dir/one.rs"
3649 );
3650 assert_eq!(
3651 params.text_document_position.position,
3652 lsp::Position::new(0, 7)
3653 );
3654 assert_eq!(params.new_name, "THREE");
3655 Ok(Some(lsp::WorkspaceEdit {
3656 changes: Some(
3657 [
3658 (
3659 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3660 vec![lsp::TextEdit::new(
3661 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3662 "THREE".to_string(),
3663 )],
3664 ),
3665 (
3666 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3667 vec![
3668 lsp::TextEdit::new(
3669 lsp::Range::new(
3670 lsp::Position::new(0, 24),
3671 lsp::Position::new(0, 27),
3672 ),
3673 "THREE".to_string(),
3674 ),
3675 lsp::TextEdit::new(
3676 lsp::Range::new(
3677 lsp::Position::new(0, 35),
3678 lsp::Position::new(0, 38),
3679 ),
3680 "THREE".to_string(),
3681 ),
3682 ],
3683 ),
3684 ]
3685 .into_iter()
3686 .collect(),
3687 ),
3688 ..Default::default()
3689 }))
3690 })
3691 .next()
3692 .await
3693 .unwrap();
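    // The resulting transaction contains both edited buffers.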
3694 let mut transaction = response.await.unwrap().0;
3695 assert_eq!(transaction.len(), 2);
3696 assert_eq!(
3697 transaction
3698 .remove_entry(&buffer)
3699 .unwrap()
3700 .0
3701 .update(cx, |buffer, _| buffer.text()),
3702 "const THREE: usize = 1;"
3703 );
3704 assert_eq!(
3705 transaction
3706 .into_keys()
3707 .next()
3708 .unwrap()
3709 .update(cx, |buffer, _| buffer.text()),
3710 "const TWO: usize = one::THREE + one::THREE;"
3711 );
3712}
3713
3714#[gpui::test]
3715async fn test_search(cx: &mut gpui::TestAppContext) {
3716 init_test(cx);
3717
3718 let fs = FakeFs::new(cx.executor());
3719 fs.insert_tree(
3720 "/dir",
3721 json!({
3722 "one.rs": "const ONE: usize = 1;",
3723 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3724 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3725 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3726 }),
3727 )
3728 .await;
3729 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3730 assert_eq!(
3731 search(
3732 &project,
3733 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3734 cx
3735 )
3736 .await
3737 .unwrap(),
3738 HashMap::from_iter([
3739 ("two.rs".to_string(), vec![6..9]),
3740 ("three.rs".to_string(), vec![37..40])
3741 ])
3742 );
3743
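    // Edit an open buffer without saving it. Subsequent searches should reflect
    // the unsaved contents.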
3744 let buffer_4 = project
3745 .update(cx, |project, cx| {
3746 project.open_local_buffer("/dir/four.rs", cx)
3747 })
3748 .await
3749 .unwrap();
3750 buffer_4.update(cx, |buffer, cx| {
3751 let text = "two::TWO";
3752 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3753 });
3754
3755 assert_eq!(
3756 search(
3757 &project,
3758 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3759 cx
3760 )
3761 .await
3762 .unwrap(),
3763 HashMap::from_iter([
3764 ("two.rs".to_string(), vec![6..9]),
3765 ("three.rs".to_string(), vec![37..40]),
3766 ("four.rs".to_string(), vec![25..28, 36..39])
3767 ])
3768 );
3769}
3770
3771#[gpui::test]
3772async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3773 init_test(cx);
3774
3775 let search_query = "file";
3776
3777 let fs = FakeFs::new(cx.executor());
3778 fs.insert_tree(
3779 "/dir",
3780 json!({
3781 "one.rs": r#"// Rust file one"#,
3782 "one.ts": r#"// TypeScript file one"#,
3783 "two.rs": r#"// Rust file two"#,
3784 "two.ts": r#"// TypeScript file two"#,
3785 }),
3786 )
3787 .await;
3788 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3789
3790 assert!(
3791 search(
3792 &project,
3793 SearchQuery::text(
3794 search_query,
3795 false,
3796 true,
3797 false,
3798 vec![PathMatcher::new("*.odd").unwrap()],
3799 Vec::new()
3800 )
3801 .unwrap(),
3802 cx
3803 )
3804 .await
3805 .unwrap()
3806 .is_empty(),
3807 "If no inclusions match, no files should be returned"
3808 );
3809
3810 assert_eq!(
3811 search(
3812 &project,
3813 SearchQuery::text(
3814 search_query,
3815 false,
3816 true,
3817 false,
3818 vec![PathMatcher::new("*.rs").unwrap()],
3819 Vec::new()
3820 )
3821 .unwrap(),
3822 cx
3823 )
3824 .await
3825 .unwrap(),
3826 HashMap::from_iter([
3827 ("one.rs".to_string(), vec![8..12]),
3828 ("two.rs".to_string(), vec![8..12]),
3829 ]),
3830 "Rust only search should give only Rust files"
3831 );
3832
3833 assert_eq!(
3834 search(
3835 &project,
3836 SearchQuery::text(
3837 search_query,
3838 false,
3839 true,
3840 false,
3841 vec![
3842 PathMatcher::new("*.ts").unwrap(),
3843 PathMatcher::new("*.odd").unwrap(),
3844 ],
3845 Vec::new()
3846 ).unwrap(),
3847 cx
3848 )
3849 .await
3850 .unwrap(),
3851 HashMap::from_iter([
3852 ("one.ts".to_string(), vec![14..18]),
3853 ("two.ts".to_string(), vec![14..18]),
3854 ]),
3855 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3856 );
3857
3858 assert_eq!(
3859 search(
3860 &project,
3861 SearchQuery::text(
3862 search_query,
3863 false,
3864 true,
3865 false,
3866 vec![
3867 PathMatcher::new("*.rs").unwrap(),
3868 PathMatcher::new("*.ts").unwrap(),
3869 PathMatcher::new("*.odd").unwrap(),
3870 ],
3871 Vec::new()
3872 ).unwrap(),
3873 cx
3874 )
3875 .await
3876 .unwrap(),
3877 HashMap::from_iter([
3878 ("one.rs".to_string(), vec![8..12]),
3879 ("one.ts".to_string(), vec![14..18]),
3880 ("two.rs".to_string(), vec![8..12]),
3881 ("two.ts".to_string(), vec![14..18]),
3882 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3884 );
3885}
3886
3887#[gpui::test]
3888async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3889 init_test(cx);
3890
3891 let search_query = "file";
3892
3893 let fs = FakeFs::new(cx.executor());
3894 fs.insert_tree(
3895 "/dir",
3896 json!({
3897 "one.rs": r#"// Rust file one"#,
3898 "one.ts": r#"// TypeScript file one"#,
3899 "two.rs": r#"// Rust file two"#,
3900 "two.ts": r#"// TypeScript file two"#,
3901 }),
3902 )
3903 .await;
3904 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3905
3906 assert_eq!(
3907 search(
3908 &project,
3909 SearchQuery::text(
3910 search_query,
3911 false,
3912 true,
3913 false,
3914 Vec::new(),
3915 vec![PathMatcher::new("*.odd").unwrap()],
3916 )
3917 .unwrap(),
3918 cx
3919 )
3920 .await
3921 .unwrap(),
3922 HashMap::from_iter([
3923 ("one.rs".to_string(), vec![8..12]),
3924 ("one.ts".to_string(), vec![14..18]),
3925 ("two.rs".to_string(), vec![8..12]),
3926 ("two.ts".to_string(), vec![14..18]),
3927 ]),
3928 "If no exclusions match, all files should be returned"
3929 );
3930
3931 assert_eq!(
3932 search(
3933 &project,
3934 SearchQuery::text(
3935 search_query,
3936 false,
3937 true,
3938 false,
3939 Vec::new(),
3940 vec![PathMatcher::new("*.rs").unwrap()],
3941 )
3942 .unwrap(),
3943 cx
3944 )
3945 .await
3946 .unwrap(),
3947 HashMap::from_iter([
3948 ("one.ts".to_string(), vec![14..18]),
3949 ("two.ts".to_string(), vec![14..18]),
3950 ]),
3951 "Rust exclusion search should give only TypeScript files"
3952 );
3953
3954 assert_eq!(
3955 search(
3956 &project,
3957 SearchQuery::text(
3958 search_query,
3959 false,
3960 true,
3961 false,
3962 Vec::new(),
3963 vec![
3964 PathMatcher::new("*.ts").unwrap(),
3965 PathMatcher::new("*.odd").unwrap(),
3966 ],
3967 ).unwrap(),
3968 cx
3969 )
3970 .await
3971 .unwrap(),
3972 HashMap::from_iter([
3973 ("one.rs".to_string(), vec![8..12]),
3974 ("two.rs".to_string(), vec![8..12]),
3975 ]),
3976 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3977 );
3978
3979 assert!(
3980 search(
3981 &project,
3982 SearchQuery::text(
3983 search_query,
3984 false,
3985 true,
3986 false,
3987 Vec::new(),
3988 vec![
3989 PathMatcher::new("*.rs").unwrap(),
3990 PathMatcher::new("*.ts").unwrap(),
3991 PathMatcher::new("*.odd").unwrap(),
3992 ],
3993 ).unwrap(),
3994 cx
3995 )
3996 .await
3997 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3999 );
4000}
4001
4002#[gpui::test]
4003async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4004 init_test(cx);
4005
4006 let search_query = "file";
4007
4008 let fs = FakeFs::new(cx.executor());
4009 fs.insert_tree(
4010 "/dir",
4011 json!({
4012 "one.rs": r#"// Rust file one"#,
4013 "one.ts": r#"// TypeScript file one"#,
4014 "two.rs": r#"// Rust file two"#,
4015 "two.ts": r#"// TypeScript file two"#,
4016 }),
4017 )
4018 .await;
4019 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4020
4021 assert!(
4022 search(
4023 &project,
4024 SearchQuery::text(
4025 search_query,
4026 false,
4027 true,
4028 false,
4029 vec![PathMatcher::new("*.odd").unwrap()],
4030 vec![PathMatcher::new("*.odd").unwrap()],
4031 )
4032 .unwrap(),
4033 cx
4034 )
4035 .await
4036 .unwrap()
4037 .is_empty(),
        "If neither the inclusions nor the exclusions match anything, no files should be returned"
4039 );
4040
4041 assert!(
4042 search(
4043 &project,
4044 SearchQuery::text(
4045 search_query,
4046 false,
4047 true,
4048 false,
4049 vec![PathMatcher::new("*.ts").unwrap()],
4050 vec![PathMatcher::new("*.ts").unwrap()],
4051 ).unwrap(),
4052 cx
4053 )
4054 .await
4055 .unwrap()
4056 .is_empty(),
        "If the TypeScript files match both the inclusions and the exclusions, the exclusions should win and no files should be returned."
4058 );
4059
4060 assert!(
4061 search(
4062 &project,
4063 SearchQuery::text(
4064 search_query,
4065 false,
4066 true,
4067 false,
4068 vec![
4069 PathMatcher::new("*.ts").unwrap(),
4070 PathMatcher::new("*.odd").unwrap()
4071 ],
4072 vec![
4073 PathMatcher::new("*.ts").unwrap(),
4074 PathMatcher::new("*.odd").unwrap()
4075 ],
4076 )
4077 .unwrap(),
4078 cx
4079 )
4080 .await
4081 .unwrap()
4082 .is_empty(),
        "Adding inclusions and exclusions that match nothing should not change the empty result."
4084 );
4085
4086 assert_eq!(
4087 search(
4088 &project,
4089 SearchQuery::text(
4090 search_query,
4091 false,
4092 true,
4093 false,
4094 vec![
4095 PathMatcher::new("*.ts").unwrap(),
4096 PathMatcher::new("*.odd").unwrap()
4097 ],
4098 vec![
4099 PathMatcher::new("*.rs").unwrap(),
4100 PathMatcher::new("*.odd").unwrap()
4101 ],
4102 )
4103 .unwrap(),
4104 cx
4105 )
4106 .await
4107 .unwrap(),
4108 HashMap::from_iter([
4109 ("one.ts".to_string(), vec![14..18]),
4110 ("two.ts".to_string(), vec![14..18]),
4111 ]),
4112 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4113 );
4114}
4115
4116#[test]
4117fn test_glob_literal_prefix() {
4118 assert_eq!(glob_literal_prefix("**/*.js"), "");
4119 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4120 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4121 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4122}
4123
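/// Runs a project search and collects the results as a map from file path to
/// the matching offset ranges within that file.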
4124async fn search(
4125 project: &Model<Project>,
4126 query: SearchQuery,
4127 cx: &mut gpui::TestAppContext,
4128) -> Result<HashMap<String, Vec<Range<usize>>>> {
4129 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4130 let mut result = HashMap::default();
4131 while let Some((buffer, range)) = search_rx.next().await {
4132 result.entry(buffer).or_insert(range);
4133 }
4134 Ok(result
4135 .into_iter()
4136 .map(|(buffer, ranges)| {
4137 buffer.update(cx, |buffer, _| {
4138 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4139 let ranges = ranges
4140 .into_iter()
4141 .map(|range| range.to_offset(buffer))
4142 .collect::<Vec<_>>();
4143 (path, ranges)
4144 })
4145 })
4146 .collect())
4147}
4148
4149fn init_test(cx: &mut gpui::TestAppContext) {
4150 if std::env::var("RUST_LOG").is_ok() {
4151 env_logger::try_init().ok();
4152 }
4153
4154 cx.update(|cx| {
4155 let settings_store = SettingsStore::test(cx);
4156 cx.set_global(settings_store);
4157 language::init(cx);
4158 Project::init_settings(cx);
4159 });
4160}