1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::RealFs;
3use futures::{future, StreamExt};
4use gpui::{executor::Deterministic, test::subscribe};
5use language::{
6 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
7 LineEnding, OffsetRangeExt, Point, ToPoint,
8};
9use lsp::Url;
10use serde_json::json;
11use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
12use unindent::Unindent as _;
13use util::{assert_set_eq, test::temp_tree};
14
15#[gpui::test]
16async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
17 let dir = temp_tree(json!({
18 "root": {
19 "apple": "",
20 "banana": {
21 "carrot": {
22 "date": "",
23 "endive": "",
24 }
25 },
26 "fennel": {
27 "grape": "",
28 }
29 }
30 }));
31
32 let root_link_path = dir.path().join("root_link");
33 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
34 unix::fs::symlink(
35 &dir.path().join("root/fennel"),
36 &dir.path().join("root/finnochio"),
37 )
38 .unwrap();
39
40 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
41
42 project.read_with(cx, |project, cx| {
43 let tree = project.worktrees(cx).next().unwrap().read(cx);
44 assert_eq!(tree.file_count(), 5);
45 assert_eq!(
46 tree.inode_for_path("fennel/grape"),
47 tree.inode_for_path("finnochio/grape")
48 );
49 });
50
51 let cancel_flag = Default::default();
52 let results = project
53 .read_with(cx, |project, cx| {
54 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
55 })
56 .await;
57 assert_eq!(
58 results
59 .into_iter()
60 .map(|result| result.path)
61 .collect::<Vec<Arc<Path>>>(),
62 vec![
63 PathBuf::from("banana/carrot/date").into(),
64 PathBuf::from("banana/carrot/endive").into(),
65 ]
66 );
67}
68
69#[gpui::test]
70async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
71 cx.foreground().forbid_parking();
72
73 let mut rust_language = Language::new(
74 LanguageConfig {
75 name: "Rust".into(),
76 path_suffixes: vec!["rs".to_string()],
77 ..Default::default()
78 },
79 Some(tree_sitter_rust::language()),
80 );
81 let mut json_language = Language::new(
82 LanguageConfig {
83 name: "JSON".into(),
84 path_suffixes: vec!["json".to_string()],
85 ..Default::default()
86 },
87 None,
88 );
89 let mut fake_rust_servers = rust_language
90 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
91 name: "the-rust-language-server",
92 capabilities: lsp::ServerCapabilities {
93 completion_provider: Some(lsp::CompletionOptions {
94 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
95 ..Default::default()
96 }),
97 ..Default::default()
98 },
99 ..Default::default()
100 }))
101 .await;
102 let mut fake_json_servers = json_language
103 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
104 name: "the-json-language-server",
105 capabilities: lsp::ServerCapabilities {
106 completion_provider: Some(lsp::CompletionOptions {
107 trigger_characters: Some(vec![":".to_string()]),
108 ..Default::default()
109 }),
110 ..Default::default()
111 },
112 ..Default::default()
113 }))
114 .await;
115
116 let fs = FakeFs::new(cx.background());
117 fs.insert_tree(
118 "/the-root",
119 json!({
120 "test.rs": "const A: i32 = 1;",
121 "test2.rs": "",
122 "Cargo.toml": "a = 1",
123 "package.json": "{\"a\": 1}",
124 }),
125 )
126 .await;
127
128 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
129
130 // Open a buffer before languages have been added
131 let json_buffer = project
132 .update(cx, |project, cx| {
133 project.open_local_buffer("/the-root/package.json", cx)
134 })
135 .await
136 .unwrap();
137
138 // Assert that this buffer does not have a language
139 assert!(json_buffer.read_with(cx, |buffer, _| { buffer.language().is_none() }));
140
141 // Now we add the languages to the project, and subscribe to the watcher
142 project.update(cx, |project, cx| {
143 // Get a handle to the channel and clear out default item
144 let mut recv = project.languages.subscribe();
145 recv.blocking_recv();
146
147 // Add, then wait to be notified that JSON has been added
148 project.languages.add(Arc::new(json_language));
149 recv.blocking_recv();
150
151 // Add, then wait to be notified that Rust has been added
152 project.languages.add(Arc::new(rust_language));
153 recv.blocking_recv();
154 // Uncommenting this would cause the thread to block indefinitely:
155 // recv.blocking_recv();
156
157 // Force the assignment, we know the watcher has been notified
158 // but have no way to wait for the watcher to assign to the project
159 project.assign_language_to_buffer(&json_buffer, cx);
160 });
161
162 // Assert that the opened buffer does have a language, and that it is JSON
163 let name = json_buffer.read_with(cx, |buffer, _| buffer.language().map(|l| l.name()));
164 assert_eq!(name, Some("JSON".into()));
165
166 // Close the JSON buffer we opened
167 cx.update(|_| drop(json_buffer));
168
169 // Open a buffer without an associated language server.
170 let toml_buffer = project
171 .update(cx, |project, cx| {
172 project.open_local_buffer("/the-root/Cargo.toml", cx)
173 })
174 .await
175 .unwrap();
176
177 // Open a buffer with an associated language server.
178 let rust_buffer = project
179 .update(cx, |project, cx| {
180 project.open_local_buffer("/the-root/test.rs", cx)
181 })
182 .await
183 .unwrap();
184
185 // A server is started up, and it is notified about Rust files.
186 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
187 assert_eq!(
188 fake_rust_server
189 .receive_notification::<lsp::notification::DidOpenTextDocument>()
190 .await
191 .text_document,
192 lsp::TextDocumentItem {
193 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
194 version: 0,
195 text: "const A: i32 = 1;".to_string(),
196 language_id: Default::default()
197 }
198 );
199
200 // The buffer is configured based on the language server's capabilities.
201 rust_buffer.read_with(cx, |buffer, _| {
202 assert_eq!(
203 buffer.completion_triggers(),
204 &[".".to_string(), "::".to_string()]
205 );
206 });
207 toml_buffer.read_with(cx, |buffer, _| {
208 assert!(buffer.completion_triggers().is_empty());
209 });
210
211 // Edit a buffer. The changes are reported to the language server.
212 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
213 assert_eq!(
214 fake_rust_server
215 .receive_notification::<lsp::notification::DidChangeTextDocument>()
216 .await
217 .text_document,
218 lsp::VersionedTextDocumentIdentifier::new(
219 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
220 1
221 )
222 );
223
224 // Open a third buffer with a different associated language server.
225 let json_buffer = project
226 .update(cx, |project, cx| {
227 project.open_local_buffer("/the-root/package.json", cx)
228 })
229 .await
230 .unwrap();
231
232 // A json language server is started up and is only notified about the json buffer.
233 let mut fake_json_server = fake_json_servers.next().await.unwrap();
234 assert_eq!(
235 fake_json_server
236 .receive_notification::<lsp::notification::DidOpenTextDocument>()
237 .await
238 .text_document,
239 lsp::TextDocumentItem {
240 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
241 version: 0,
242 text: "{\"a\": 1}".to_string(),
243 language_id: Default::default()
244 }
245 );
246
247 // This buffer is configured based on the second language server's
248 // capabilities.
249 json_buffer.read_with(cx, |buffer, _| {
250 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
251 });
252
253 // When opening another buffer whose language server is already running,
254 // it is also configured based on the existing language server's capabilities.
255 let rust_buffer2 = project
256 .update(cx, |project, cx| {
257 project.open_local_buffer("/the-root/test2.rs", cx)
258 })
259 .await
260 .unwrap();
261 rust_buffer2.read_with(cx, |buffer, _| {
262 assert_eq!(
263 buffer.completion_triggers(),
264 &[".".to_string(), "::".to_string()]
265 );
266 });
267
268 // Changes are reported only to servers matching the buffer's language.
269 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
270 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
271 assert_eq!(
272 fake_rust_server
273 .receive_notification::<lsp::notification::DidChangeTextDocument>()
274 .await
275 .text_document,
276 lsp::VersionedTextDocumentIdentifier::new(
277 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
278 1
279 )
280 );
281
282 // Save notifications are reported to all servers.
283 toml_buffer
284 .update(cx, |buffer, cx| buffer.save(cx))
285 .await
286 .unwrap();
287 assert_eq!(
288 fake_rust_server
289 .receive_notification::<lsp::notification::DidSaveTextDocument>()
290 .await
291 .text_document,
292 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
293 );
294 assert_eq!(
295 fake_json_server
296 .receive_notification::<lsp::notification::DidSaveTextDocument>()
297 .await
298 .text_document,
299 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
300 );
301
302 // Renames are reported only to servers matching the buffer's language.
303 fs.rename(
304 Path::new("/the-root/test2.rs"),
305 Path::new("/the-root/test3.rs"),
306 Default::default(),
307 )
308 .await
309 .unwrap();
310 assert_eq!(
311 fake_rust_server
312 .receive_notification::<lsp::notification::DidCloseTextDocument>()
313 .await
314 .text_document,
315 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
316 );
317 assert_eq!(
318 fake_rust_server
319 .receive_notification::<lsp::notification::DidOpenTextDocument>()
320 .await
321 .text_document,
322 lsp::TextDocumentItem {
323 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
324 version: 0,
325 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
326 language_id: Default::default()
327 },
328 );
329
330 rust_buffer2.update(cx, |buffer, cx| {
331 buffer.update_diagnostics(
332 DiagnosticSet::from_sorted_entries(
333 vec![DiagnosticEntry {
334 diagnostic: Default::default(),
335 range: Anchor::MIN..Anchor::MAX,
336 }],
337 &buffer.snapshot(),
338 ),
339 cx,
340 );
341 assert_eq!(
342 buffer
343 .snapshot()
344 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
345 .count(),
346 1
347 );
348 });
349
350 // When the rename changes the extension of the file, the buffer gets closed on the old
351 // language server and gets opened on the new one.
352 fs.rename(
353 Path::new("/the-root/test3.rs"),
354 Path::new("/the-root/test3.json"),
355 Default::default(),
356 )
357 .await
358 .unwrap();
359 assert_eq!(
360 fake_rust_server
361 .receive_notification::<lsp::notification::DidCloseTextDocument>()
362 .await
363 .text_document,
364 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
365 );
366 assert_eq!(
367 fake_json_server
368 .receive_notification::<lsp::notification::DidOpenTextDocument>()
369 .await
370 .text_document,
371 lsp::TextDocumentItem {
372 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
373 version: 0,
374 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
375 language_id: Default::default()
376 },
377 );
378
379 // We clear the diagnostics, since the language has changed.
380 rust_buffer2.read_with(cx, |buffer, _| {
381 assert_eq!(
382 buffer
383 .snapshot()
384 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
385 .count(),
386 0
387 );
388 });
389
390 // The renamed file's version resets after changing language server.
391 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
392 assert_eq!(
393 fake_json_server
394 .receive_notification::<lsp::notification::DidChangeTextDocument>()
395 .await
396 .text_document,
397 lsp::VersionedTextDocumentIdentifier::new(
398 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
399 1
400 )
401 );
402
403 // Restart language servers
404 project.update(cx, |project, cx| {
405 project.restart_language_servers_for_buffers(
406 vec![rust_buffer.clone(), json_buffer.clone()],
407 cx,
408 );
409 });
410
411 let mut rust_shutdown_requests = fake_rust_server
412 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
413 let mut json_shutdown_requests = fake_json_server
414 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
415 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
416
417 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
418 let mut fake_json_server = fake_json_servers.next().await.unwrap();
419
420 // Ensure rust document is reopened in new rust language server
421 assert_eq!(
422 fake_rust_server
423 .receive_notification::<lsp::notification::DidOpenTextDocument>()
424 .await
425 .text_document,
426 lsp::TextDocumentItem {
427 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
428 version: 1,
429 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
430 language_id: Default::default()
431 }
432 );
433
434 // Ensure json documents are reopened in new json language server
435 assert_set_eq!(
436 [
437 fake_json_server
438 .receive_notification::<lsp::notification::DidOpenTextDocument>()
439 .await
440 .text_document,
441 fake_json_server
442 .receive_notification::<lsp::notification::DidOpenTextDocument>()
443 .await
444 .text_document,
445 ],
446 [
447 lsp::TextDocumentItem {
448 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
449 version: 0,
450 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
451 language_id: Default::default()
452 },
453 lsp::TextDocumentItem {
454 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
455 version: 1,
456 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
457 language_id: Default::default()
458 }
459 ]
460 );
461
462 // Close notifications are reported only to servers matching the buffer's language.
463 cx.update(|_| drop(json_buffer));
464 let close_message = lsp::DidCloseTextDocumentParams {
465 text_document: lsp::TextDocumentIdentifier::new(
466 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
467 ),
468 };
469 assert_eq!(
470 fake_json_server
471 .receive_notification::<lsp::notification::DidCloseTextDocument>()
472 .await,
473 close_message,
474 );
475}
476
477#[gpui::test]
478async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
479 cx.foreground().forbid_parking();
480
481 let fs = FakeFs::new(cx.background());
482 fs.insert_tree(
483 "/dir",
484 json!({
485 "a.rs": "let a = 1;",
486 "b.rs": "let b = 2;"
487 }),
488 )
489 .await;
490
491 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
492
493 let buffer_a = project
494 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
495 .await
496 .unwrap();
497 let buffer_b = project
498 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
499 .await
500 .unwrap();
501
502 project.update(cx, |project, cx| {
503 project
504 .update_diagnostics(
505 0,
506 lsp::PublishDiagnosticsParams {
507 uri: Url::from_file_path("/dir/a.rs").unwrap(),
508 version: None,
509 diagnostics: vec![lsp::Diagnostic {
510 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
511 severity: Some(lsp::DiagnosticSeverity::ERROR),
512 message: "error 1".to_string(),
513 ..Default::default()
514 }],
515 },
516 &[],
517 cx,
518 )
519 .unwrap();
520 project
521 .update_diagnostics(
522 0,
523 lsp::PublishDiagnosticsParams {
524 uri: Url::from_file_path("/dir/b.rs").unwrap(),
525 version: None,
526 diagnostics: vec![lsp::Diagnostic {
527 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
528 severity: Some(lsp::DiagnosticSeverity::WARNING),
529 message: "error 2".to_string(),
530 ..Default::default()
531 }],
532 },
533 &[],
534 cx,
535 )
536 .unwrap();
537 });
538
539 buffer_a.read_with(cx, |buffer, _| {
540 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
541 assert_eq!(
542 chunks
543 .iter()
544 .map(|(s, d)| (s.as_str(), *d))
545 .collect::<Vec<_>>(),
546 &[
547 ("let ", None),
548 ("a", Some(DiagnosticSeverity::ERROR)),
549 (" = 1;", None),
550 ]
551 );
552 });
553 buffer_b.read_with(cx, |buffer, _| {
554 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
555 assert_eq!(
556 chunks
557 .iter()
558 .map(|(s, d)| (s.as_str(), *d))
559 .collect::<Vec<_>>(),
560 &[
561 ("let ", None),
562 ("b", Some(DiagnosticSeverity::WARNING)),
563 (" = 2;", None),
564 ]
565 );
566 });
567}
568
569#[gpui::test]
570async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
571 cx.foreground().forbid_parking();
572
573 let fs = FakeFs::new(cx.background());
574 fs.insert_tree(
575 "/root",
576 json!({
577 "dir": {
578 "a.rs": "let a = 1;",
579 },
580 "other.rs": "let b = c;"
581 }),
582 )
583 .await;
584
585 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
586
587 let (worktree, _) = project
588 .update(cx, |project, cx| {
589 project.find_or_create_local_worktree("/root/other.rs", false, cx)
590 })
591 .await
592 .unwrap();
593 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
594
595 project.update(cx, |project, cx| {
596 project
597 .update_diagnostics(
598 0,
599 lsp::PublishDiagnosticsParams {
600 uri: Url::from_file_path("/root/other.rs").unwrap(),
601 version: None,
602 diagnostics: vec![lsp::Diagnostic {
603 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
604 severity: Some(lsp::DiagnosticSeverity::ERROR),
605 message: "unknown variable 'c'".to_string(),
606 ..Default::default()
607 }],
608 },
609 &[],
610 cx,
611 )
612 .unwrap();
613 });
614
615 let buffer = project
616 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
617 .await
618 .unwrap();
619 buffer.read_with(cx, |buffer, _| {
620 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
621 assert_eq!(
622 chunks
623 .iter()
624 .map(|(s, d)| (s.as_str(), *d))
625 .collect::<Vec<_>>(),
626 &[
627 ("let b = ", None),
628 ("c", Some(DiagnosticSeverity::ERROR)),
629 (";", None),
630 ]
631 );
632 });
633
634 project.read_with(cx, |project, cx| {
635 assert_eq!(project.diagnostic_summaries(cx).next(), None);
636 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
637 });
638}
639
640#[gpui::test]
641async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
642 cx.foreground().forbid_parking();
643
644 let progress_token = "the-progress-token";
645 let mut language = Language::new(
646 LanguageConfig {
647 name: "Rust".into(),
648 path_suffixes: vec!["rs".to_string()],
649 ..Default::default()
650 },
651 Some(tree_sitter_rust::language()),
652 );
653 let mut fake_servers = language
654 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
655 disk_based_diagnostics_progress_token: Some(progress_token.into()),
656 disk_based_diagnostics_sources: vec!["disk".into()],
657 ..Default::default()
658 }))
659 .await;
660
661 let fs = FakeFs::new(cx.background());
662 fs.insert_tree(
663 "/dir",
664 json!({
665 "a.rs": "fn a() { A }",
666 "b.rs": "const y: i32 = 1",
667 }),
668 )
669 .await;
670
671 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
672 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
673 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
674
675 // Cause worktree to start the fake language server
676 let _buffer = project
677 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
678 .await
679 .unwrap();
680
681 let mut events = subscribe(&project, cx);
682
683 let fake_server = fake_servers.next().await.unwrap();
684 fake_server.start_progress(progress_token).await;
685 assert_eq!(
686 events.next().await.unwrap(),
687 Event::DiskBasedDiagnosticsStarted {
688 language_server_id: 0,
689 }
690 );
691
692 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
693 uri: Url::from_file_path("/dir/a.rs").unwrap(),
694 version: None,
695 diagnostics: vec![lsp::Diagnostic {
696 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
697 severity: Some(lsp::DiagnosticSeverity::ERROR),
698 message: "undefined variable 'A'".to_string(),
699 ..Default::default()
700 }],
701 });
702 assert_eq!(
703 events.next().await.unwrap(),
704 Event::DiagnosticsUpdated {
705 language_server_id: 0,
706 path: (worktree_id, Path::new("a.rs")).into()
707 }
708 );
709
710 fake_server.end_progress(progress_token);
711 assert_eq!(
712 events.next().await.unwrap(),
713 Event::DiskBasedDiagnosticsFinished {
714 language_server_id: 0
715 }
716 );
717
718 let buffer = project
719 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
720 .await
721 .unwrap();
722
723 buffer.read_with(cx, |buffer, _| {
724 let snapshot = buffer.snapshot();
725 let diagnostics = snapshot
726 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
727 .collect::<Vec<_>>();
728 assert_eq!(
729 diagnostics,
730 &[DiagnosticEntry {
731 range: Point::new(0, 9)..Point::new(0, 10),
732 diagnostic: Diagnostic {
733 severity: lsp::DiagnosticSeverity::ERROR,
734 message: "undefined variable 'A'".to_string(),
735 group_id: 0,
736 is_primary: true,
737 ..Default::default()
738 }
739 }]
740 )
741 });
742
743 // Ensure publishing empty diagnostics twice only results in one update event.
744 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
745 uri: Url::from_file_path("/dir/a.rs").unwrap(),
746 version: None,
747 diagnostics: Default::default(),
748 });
749 assert_eq!(
750 events.next().await.unwrap(),
751 Event::DiagnosticsUpdated {
752 language_server_id: 0,
753 path: (worktree_id, Path::new("a.rs")).into()
754 }
755 );
756
757 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
758 uri: Url::from_file_path("/dir/a.rs").unwrap(),
759 version: None,
760 diagnostics: Default::default(),
761 });
762 cx.foreground().run_until_parked();
763 assert_eq!(futures::poll!(events.next()), Poll::Pending);
764}
765
766#[gpui::test]
767async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
768 cx.foreground().forbid_parking();
769
770 let progress_token = "the-progress-token";
771 let mut language = Language::new(
772 LanguageConfig {
773 path_suffixes: vec!["rs".to_string()],
774 ..Default::default()
775 },
776 None,
777 );
778 let mut fake_servers = language
779 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
780 disk_based_diagnostics_sources: vec!["disk".into()],
781 disk_based_diagnostics_progress_token: Some(progress_token.into()),
782 ..Default::default()
783 }))
784 .await;
785
786 let fs = FakeFs::new(cx.background());
787 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
788
789 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
790 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
791
792 let buffer = project
793 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
794 .await
795 .unwrap();
796
797 // Simulate diagnostics starting to update.
798 let fake_server = fake_servers.next().await.unwrap();
799 fake_server.start_progress(progress_token).await;
800
801 // Restart the server before the diagnostics finish updating.
802 project.update(cx, |project, cx| {
803 project.restart_language_servers_for_buffers([buffer], cx);
804 });
805 let mut events = subscribe(&project, cx);
806
807 // Simulate the newly started server sending more diagnostics.
808 let fake_server = fake_servers.next().await.unwrap();
809 fake_server.start_progress(progress_token).await;
810 assert_eq!(
811 events.next().await.unwrap(),
812 Event::DiskBasedDiagnosticsStarted {
813 language_server_id: 1
814 }
815 );
816 project.read_with(cx, |project, _| {
817 assert_eq!(
818 project
819 .language_servers_running_disk_based_diagnostics()
820 .collect::<Vec<_>>(),
821 [1]
822 );
823 });
824
825 // All diagnostics are considered done, despite the old server's diagnostic
826 // task never completing.
827 fake_server.end_progress(progress_token);
828 assert_eq!(
829 events.next().await.unwrap(),
830 Event::DiskBasedDiagnosticsFinished {
831 language_server_id: 1
832 }
833 );
834 project.read_with(cx, |project, _| {
835 assert_eq!(
836 project
837 .language_servers_running_disk_based_diagnostics()
838 .collect::<Vec<_>>(),
839 [0; 0]
840 );
841 });
842}
843
844#[gpui::test]
845async fn test_toggling_enable_language_server(
846 deterministic: Arc<Deterministic>,
847 cx: &mut gpui::TestAppContext,
848) {
849 deterministic.forbid_parking();
850
851 let mut rust = Language::new(
852 LanguageConfig {
853 name: Arc::from("Rust"),
854 path_suffixes: vec!["rs".to_string()],
855 ..Default::default()
856 },
857 None,
858 );
859 let mut fake_rust_servers = rust
860 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
861 name: "rust-lsp",
862 ..Default::default()
863 }))
864 .await;
865 let mut js = Language::new(
866 LanguageConfig {
867 name: Arc::from("JavaScript"),
868 path_suffixes: vec!["js".to_string()],
869 ..Default::default()
870 },
871 None,
872 );
873 let mut fake_js_servers = js
874 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
875 name: "js-lsp",
876 ..Default::default()
877 }))
878 .await;
879
880 let fs = FakeFs::new(cx.background());
881 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
882 .await;
883
884 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
885 project.update(cx, |project, _| {
886 project.languages.add(Arc::new(rust));
887 project.languages.add(Arc::new(js));
888 });
889
890 let _rs_buffer = project
891 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
892 .await
893 .unwrap();
894 let _js_buffer = project
895 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
896 .await
897 .unwrap();
898
899 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
900 assert_eq!(
901 fake_rust_server_1
902 .receive_notification::<lsp::notification::DidOpenTextDocument>()
903 .await
904 .text_document
905 .uri
906 .as_str(),
907 "file:///dir/a.rs"
908 );
909
910 let mut fake_js_server = fake_js_servers.next().await.unwrap();
911 assert_eq!(
912 fake_js_server
913 .receive_notification::<lsp::notification::DidOpenTextDocument>()
914 .await
915 .text_document
916 .uri
917 .as_str(),
918 "file:///dir/b.js"
919 );
920
921 // Disable Rust language server, ensuring only that server gets stopped.
922 cx.update(|cx| {
923 cx.update_global(|settings: &mut Settings, _| {
924 settings.language_overrides.insert(
925 Arc::from("Rust"),
926 settings::LanguageSettings {
927 enable_language_server: Some(false),
928 ..Default::default()
929 },
930 );
931 })
932 });
933 fake_rust_server_1
934 .receive_notification::<lsp::notification::Exit>()
935 .await;
936
937 // Enable Rust and disable JavaScript language servers, ensuring that the
938 // former gets started again and that the latter stops.
939 cx.update(|cx| {
940 cx.update_global(|settings: &mut Settings, _| {
941 settings.language_overrides.insert(
942 Arc::from("Rust"),
943 settings::LanguageSettings {
944 enable_language_server: Some(true),
945 ..Default::default()
946 },
947 );
948 settings.language_overrides.insert(
949 Arc::from("JavaScript"),
950 settings::LanguageSettings {
951 enable_language_server: Some(false),
952 ..Default::default()
953 },
954 );
955 })
956 });
957 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
958 assert_eq!(
959 fake_rust_server_2
960 .receive_notification::<lsp::notification::DidOpenTextDocument>()
961 .await
962 .text_document
963 .uri
964 .as_str(),
965 "file:///dir/a.rs"
966 );
967 fake_js_server
968 .receive_notification::<lsp::notification::Exit>()
969 .await;
970}
971
972#[gpui::test]
973async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
974 cx.foreground().forbid_parking();
975
976 let mut language = Language::new(
977 LanguageConfig {
978 name: "Rust".into(),
979 path_suffixes: vec!["rs".to_string()],
980 ..Default::default()
981 },
982 Some(tree_sitter_rust::language()),
983 );
984 let mut fake_servers = language
985 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
986 disk_based_diagnostics_sources: vec!["disk".into()],
987 ..Default::default()
988 }))
989 .await;
990
991 let text = "
992 fn a() { A }
993 fn b() { BB }
994 fn c() { CCC }
995 "
996 .unindent();
997
998 let fs = FakeFs::new(cx.background());
999 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1000
1001 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1002 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1003
1004 let buffer = project
1005 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1006 .await
1007 .unwrap();
1008
1009 let mut fake_server = fake_servers.next().await.unwrap();
1010 let open_notification = fake_server
1011 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1012 .await;
1013
1014 // Edit the buffer, moving the content down
1015 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
1016 let change_notification_1 = fake_server
1017 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1018 .await;
1019 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1020
1021 // Report some diagnostics for the initial version of the buffer
1022 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1023 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1024 version: Some(open_notification.text_document.version),
1025 diagnostics: vec![
1026 lsp::Diagnostic {
1027 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1028 severity: Some(DiagnosticSeverity::ERROR),
1029 message: "undefined variable 'A'".to_string(),
1030 source: Some("disk".to_string()),
1031 ..Default::default()
1032 },
1033 lsp::Diagnostic {
1034 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1035 severity: Some(DiagnosticSeverity::ERROR),
1036 message: "undefined variable 'BB'".to_string(),
1037 source: Some("disk".to_string()),
1038 ..Default::default()
1039 },
1040 lsp::Diagnostic {
1041 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1042 severity: Some(DiagnosticSeverity::ERROR),
1043 source: Some("disk".to_string()),
1044 message: "undefined variable 'CCC'".to_string(),
1045 ..Default::default()
1046 },
1047 ],
1048 });
1049
1050 // The diagnostics have moved down since they were created.
1051 buffer.next_notification(cx).await;
1052 buffer.read_with(cx, |buffer, _| {
1053 assert_eq!(
1054 buffer
1055 .snapshot()
1056 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1057 .collect::<Vec<_>>(),
1058 &[
1059 DiagnosticEntry {
1060 range: Point::new(3, 9)..Point::new(3, 11),
1061 diagnostic: Diagnostic {
1062 severity: DiagnosticSeverity::ERROR,
1063 message: "undefined variable 'BB'".to_string(),
1064 is_disk_based: true,
1065 group_id: 1,
1066 is_primary: true,
1067 ..Default::default()
1068 },
1069 },
1070 DiagnosticEntry {
1071 range: Point::new(4, 9)..Point::new(4, 12),
1072 diagnostic: Diagnostic {
1073 severity: DiagnosticSeverity::ERROR,
1074 message: "undefined variable 'CCC'".to_string(),
1075 is_disk_based: true,
1076 group_id: 2,
1077 is_primary: true,
1078 ..Default::default()
1079 }
1080 }
1081 ]
1082 );
1083 assert_eq!(
1084 chunks_with_diagnostics(buffer, 0..buffer.len()),
1085 [
1086 ("\n\nfn a() { ".to_string(), None),
1087 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1088 (" }\nfn b() { ".to_string(), None),
1089 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1090 (" }\nfn c() { ".to_string(), None),
1091 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1092 (" }\n".to_string(), None),
1093 ]
1094 );
1095 assert_eq!(
1096 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1097 [
1098 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1099 (" }\nfn c() { ".to_string(), None),
1100 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1101 ]
1102 );
1103 });
1104
1105 // Ensure overlapping diagnostics are highlighted correctly.
1106 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1107 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1108 version: Some(open_notification.text_document.version),
1109 diagnostics: vec![
1110 lsp::Diagnostic {
1111 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1112 severity: Some(DiagnosticSeverity::ERROR),
1113 message: "undefined variable 'A'".to_string(),
1114 source: Some("disk".to_string()),
1115 ..Default::default()
1116 },
1117 lsp::Diagnostic {
1118 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1119 severity: Some(DiagnosticSeverity::WARNING),
1120 message: "unreachable statement".to_string(),
1121 source: Some("disk".to_string()),
1122 ..Default::default()
1123 },
1124 ],
1125 });
1126
1127 buffer.next_notification(cx).await;
1128 buffer.read_with(cx, |buffer, _| {
1129 assert_eq!(
1130 buffer
1131 .snapshot()
1132 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1133 .collect::<Vec<_>>(),
1134 &[
1135 DiagnosticEntry {
1136 range: Point::new(2, 9)..Point::new(2, 12),
1137 diagnostic: Diagnostic {
1138 severity: DiagnosticSeverity::WARNING,
1139 message: "unreachable statement".to_string(),
1140 is_disk_based: true,
1141 group_id: 4,
1142 is_primary: true,
1143 ..Default::default()
1144 }
1145 },
1146 DiagnosticEntry {
1147 range: Point::new(2, 9)..Point::new(2, 10),
1148 diagnostic: Diagnostic {
1149 severity: DiagnosticSeverity::ERROR,
1150 message: "undefined variable 'A'".to_string(),
1151 is_disk_based: true,
1152 group_id: 3,
1153 is_primary: true,
1154 ..Default::default()
1155 },
1156 }
1157 ]
1158 );
1159 assert_eq!(
1160 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1161 [
1162 ("fn a() { ".to_string(), None),
1163 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1164 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1165 ("\n".to_string(), None),
1166 ]
1167 );
1168 assert_eq!(
1169 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1170 [
1171 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1172 ("\n".to_string(), None),
1173 ]
1174 );
1175 });
1176
1177 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1178 // changes since the last save.
1179 buffer.update(cx, |buffer, cx| {
1180 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
1181 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
1182 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
1183 });
1184 let change_notification_2 = fake_server
1185 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1186 .await;
1187 assert!(
1188 change_notification_2.text_document.version > change_notification_1.text_document.version
1189 );
1190
1191 // Handle out-of-order diagnostics
1192 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1193 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1194 version: Some(change_notification_2.text_document.version),
1195 diagnostics: vec![
1196 lsp::Diagnostic {
1197 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1198 severity: Some(DiagnosticSeverity::ERROR),
1199 message: "undefined variable 'BB'".to_string(),
1200 source: Some("disk".to_string()),
1201 ..Default::default()
1202 },
1203 lsp::Diagnostic {
1204 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1205 severity: Some(DiagnosticSeverity::WARNING),
1206 message: "undefined variable 'A'".to_string(),
1207 source: Some("disk".to_string()),
1208 ..Default::default()
1209 },
1210 ],
1211 });
1212
1213 buffer.next_notification(cx).await;
1214 buffer.read_with(cx, |buffer, _| {
1215 assert_eq!(
1216 buffer
1217 .snapshot()
1218 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1219 .collect::<Vec<_>>(),
1220 &[
1221 DiagnosticEntry {
1222 range: Point::new(2, 21)..Point::new(2, 22),
1223 diagnostic: Diagnostic {
1224 severity: DiagnosticSeverity::WARNING,
1225 message: "undefined variable 'A'".to_string(),
1226 is_disk_based: true,
1227 group_id: 6,
1228 is_primary: true,
1229 ..Default::default()
1230 }
1231 },
1232 DiagnosticEntry {
1233 range: Point::new(3, 9)..Point::new(3, 14),
1234 diagnostic: Diagnostic {
1235 severity: DiagnosticSeverity::ERROR,
1236 message: "undefined variable 'BB'".to_string(),
1237 is_disk_based: true,
1238 group_id: 5,
1239 is_primary: true,
1240 ..Default::default()
1241 },
1242 }
1243 ]
1244 );
1245 });
1246}
1247
1248#[gpui::test]
1249async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1250 cx.foreground().forbid_parking();
1251
1252 let text = concat!(
1253 "let one = ;\n", //
1254 "let two = \n",
1255 "let three = 3;\n",
1256 );
1257
1258 let fs = FakeFs::new(cx.background());
1259 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1260
1261 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1262 let buffer = project
1263 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1264 .await
1265 .unwrap();
1266
1267 project.update(cx, |project, cx| {
1268 project
1269 .update_buffer_diagnostics(
1270 &buffer,
1271 vec![
1272 DiagnosticEntry {
1273 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
1274 diagnostic: Diagnostic {
1275 severity: DiagnosticSeverity::ERROR,
1276 message: "syntax error 1".to_string(),
1277 ..Default::default()
1278 },
1279 },
1280 DiagnosticEntry {
1281 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
1282 diagnostic: Diagnostic {
1283 severity: DiagnosticSeverity::ERROR,
1284 message: "syntax error 2".to_string(),
1285 ..Default::default()
1286 },
1287 },
1288 ],
1289 None,
1290 cx,
1291 )
1292 .unwrap();
1293 });
1294
1295 // An empty range is extended forward to include the following character.
1296 // At the end of a line, an empty range is extended backward to include
1297 // the preceding character.
1298 buffer.read_with(cx, |buffer, _| {
1299 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
1300 assert_eq!(
1301 chunks
1302 .iter()
1303 .map(|(s, d)| (s.as_str(), *d))
1304 .collect::<Vec<_>>(),
1305 &[
1306 ("let one = ", None),
1307 (";", Some(DiagnosticSeverity::ERROR)),
1308 ("\nlet two =", None),
1309 (" ", Some(DiagnosticSeverity::ERROR)),
1310 ("\nlet three = 3;\n", None)
1311 ]
1312 );
1313 });
1314}
1315
1316#[gpui::test]
1317async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1318 cx.foreground().forbid_parking();
1319
1320 let mut language = Language::new(
1321 LanguageConfig {
1322 name: "Rust".into(),
1323 path_suffixes: vec!["rs".to_string()],
1324 ..Default::default()
1325 },
1326 Some(tree_sitter_rust::language()),
1327 );
1328 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1329
1330 let text = "
1331 fn a() {
1332 f1();
1333 }
1334 fn b() {
1335 f2();
1336 }
1337 fn c() {
1338 f3();
1339 }
1340 "
1341 .unindent();
1342
1343 let fs = FakeFs::new(cx.background());
1344 fs.insert_tree(
1345 "/dir",
1346 json!({
1347 "a.rs": text.clone(),
1348 }),
1349 )
1350 .await;
1351
1352 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1353 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1354 let buffer = project
1355 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1356 .await
1357 .unwrap();
1358
1359 let mut fake_server = fake_servers.next().await.unwrap();
1360 let lsp_document_version = fake_server
1361 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1362 .await
1363 .text_document
1364 .version;
1365
1366 // Simulate editing the buffer after the language server computes some edits.
1367 buffer.update(cx, |buffer, cx| {
1368 buffer.edit(
1369 [(
1370 Point::new(0, 0)..Point::new(0, 0),
1371 "// above first function\n",
1372 )],
1373 cx,
1374 );
1375 buffer.edit(
1376 [(
1377 Point::new(2, 0)..Point::new(2, 0),
1378 " // inside first function\n",
1379 )],
1380 cx,
1381 );
1382 buffer.edit(
1383 [(
1384 Point::new(6, 4)..Point::new(6, 4),
1385 "// inside second function ",
1386 )],
1387 cx,
1388 );
1389
1390 assert_eq!(
1391 buffer.text(),
1392 "
1393 // above first function
1394 fn a() {
1395 // inside first function
1396 f1();
1397 }
1398 fn b() {
1399 // inside second function f2();
1400 }
1401 fn c() {
1402 f3();
1403 }
1404 "
1405 .unindent()
1406 );
1407 });
1408
1409 let edits = project
1410 .update(cx, |project, cx| {
1411 project.edits_from_lsp(
1412 &buffer,
1413 vec![
1414 // replace body of first function
1415 lsp::TextEdit {
1416 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1417 new_text: "
1418 fn a() {
1419 f10();
1420 }
1421 "
1422 .unindent(),
1423 },
1424 // edit inside second function
1425 lsp::TextEdit {
1426 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1427 new_text: "00".into(),
1428 },
1429 // edit inside third function via two distinct edits
1430 lsp::TextEdit {
1431 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1432 new_text: "4000".into(),
1433 },
1434 lsp::TextEdit {
1435 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1436 new_text: "".into(),
1437 },
1438 ],
1439 Some(lsp_document_version),
1440 cx,
1441 )
1442 })
1443 .await
1444 .unwrap();
1445
1446 buffer.update(cx, |buffer, cx| {
1447 for (range, new_text) in edits {
1448 buffer.edit([(range, new_text)], cx);
1449 }
1450 assert_eq!(
1451 buffer.text(),
1452 "
1453 // above first function
1454 fn a() {
1455 // inside first function
1456 f10();
1457 }
1458 fn b() {
1459 // inside second function f200();
1460 }
1461 fn c() {
1462 f4000();
1463 }
1464 "
1465 .unindent()
1466 );
1467 });
1468}
1469
1470#[gpui::test]
1471async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1472 cx.foreground().forbid_parking();
1473
1474 let text = "
1475 use a::b;
1476 use a::c;
1477
1478 fn f() {
1479 b();
1480 c();
1481 }
1482 "
1483 .unindent();
1484
1485 let fs = FakeFs::new(cx.background());
1486 fs.insert_tree(
1487 "/dir",
1488 json!({
1489 "a.rs": text.clone(),
1490 }),
1491 )
1492 .await;
1493
1494 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1495 let buffer = project
1496 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1497 .await
1498 .unwrap();
1499
1500 // Simulate the language server sending us a small edit in the form of a very large diff.
1501 // Rust-analyzer does this when performing a merge-imports code action.
1502 let edits = project
1503 .update(cx, |project, cx| {
1504 project.edits_from_lsp(
1505 &buffer,
1506 [
1507 // Replace the first use statement without editing the semicolon.
1508 lsp::TextEdit {
1509 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1510 new_text: "a::{b, c}".into(),
1511 },
1512 // Reinsert the remainder of the file between the semicolon and the final
1513 // newline of the file.
1514 lsp::TextEdit {
1515 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1516 new_text: "\n\n".into(),
1517 },
1518 lsp::TextEdit {
1519 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1520 new_text: "
1521 fn f() {
1522 b();
1523 c();
1524 }"
1525 .unindent(),
1526 },
1527 // Delete everything after the first newline of the file.
1528 lsp::TextEdit {
1529 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1530 new_text: "".into(),
1531 },
1532 ],
1533 None,
1534 cx,
1535 )
1536 })
1537 .await
1538 .unwrap();
1539
1540 buffer.update(cx, |buffer, cx| {
1541 let edits = edits
1542 .into_iter()
1543 .map(|(range, text)| {
1544 (
1545 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1546 text,
1547 )
1548 })
1549 .collect::<Vec<_>>();
1550
1551 assert_eq!(
1552 edits,
1553 [
1554 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1555 (Point::new(1, 0)..Point::new(2, 0), "".into())
1556 ]
1557 );
1558
1559 for (range, new_text) in edits {
1560 buffer.edit([(range, new_text)], cx);
1561 }
1562 assert_eq!(
1563 buffer.text(),
1564 "
1565 use a::{b, c};
1566
1567 fn f() {
1568 b();
1569 c();
1570 }
1571 "
1572 .unindent()
1573 );
1574 });
1575}
1576
1577#[gpui::test]
1578async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1579 cx.foreground().forbid_parking();
1580
1581 let text = "
1582 use a::b;
1583 use a::c;
1584
1585 fn f() {
1586 b();
1587 c();
1588 }
1589 "
1590 .unindent();
1591
1592 let fs = FakeFs::new(cx.background());
1593 fs.insert_tree(
1594 "/dir",
1595 json!({
1596 "a.rs": text.clone(),
1597 }),
1598 )
1599 .await;
1600
1601 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1602 let buffer = project
1603 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1604 .await
1605 .unwrap();
1606
1607 // Simulate the language server sending us edits in a non-ordered fashion,
1608 // with ranges sometimes being inverted.
1609 let edits = project
1610 .update(cx, |project, cx| {
1611 project.edits_from_lsp(
1612 &buffer,
1613 [
1614 lsp::TextEdit {
1615 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1616 new_text: "\n\n".into(),
1617 },
1618 lsp::TextEdit {
1619 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1620 new_text: "a::{b, c}".into(),
1621 },
1622 lsp::TextEdit {
1623 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1624 new_text: "".into(),
1625 },
1626 lsp::TextEdit {
1627 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1628 new_text: "
1629 fn f() {
1630 b();
1631 c();
1632 }"
1633 .unindent(),
1634 },
1635 ],
1636 None,
1637 cx,
1638 )
1639 })
1640 .await
1641 .unwrap();
1642
1643 buffer.update(cx, |buffer, cx| {
1644 let edits = edits
1645 .into_iter()
1646 .map(|(range, text)| {
1647 (
1648 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1649 text,
1650 )
1651 })
1652 .collect::<Vec<_>>();
1653
1654 assert_eq!(
1655 edits,
1656 [
1657 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1658 (Point::new(1, 0)..Point::new(2, 0), "".into())
1659 ]
1660 );
1661
1662 for (range, new_text) in edits {
1663 buffer.edit([(range, new_text)], cx);
1664 }
1665 assert_eq!(
1666 buffer.text(),
1667 "
1668 use a::{b, c};
1669
1670 fn f() {
1671 b();
1672 c();
1673 }
1674 "
1675 .unindent()
1676 );
1677 });
1678}
1679
1680fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1681 buffer: &Buffer,
1682 range: Range<T>,
1683) -> Vec<(String, Option<DiagnosticSeverity>)> {
1684 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1685 for chunk in buffer.snapshot().chunks(range, true) {
1686 if chunks.last().map_or(false, |prev_chunk| {
1687 prev_chunk.1 == chunk.diagnostic_severity
1688 }) {
1689 chunks.last_mut().unwrap().0.push_str(chunk.text);
1690 } else {
1691 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1692 }
1693 }
1694 chunks
1695}
1696
1697#[gpui::test]
1698async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
1699 let dir = temp_tree(json!({
1700 "root": {
1701 "dir1": {},
1702 "dir2": {
1703 "dir3": {}
1704 }
1705 }
1706 }));
1707
1708 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
1709 let cancel_flag = Default::default();
1710 let results = project
1711 .read_with(cx, |project, cx| {
1712 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
1713 })
1714 .await;
1715
1716 assert!(results.is_empty());
1717}
1718
1719#[gpui::test(iterations = 10)]
1720async fn test_definition(cx: &mut gpui::TestAppContext) {
1721 let mut language = Language::new(
1722 LanguageConfig {
1723 name: "Rust".into(),
1724 path_suffixes: vec!["rs".to_string()],
1725 ..Default::default()
1726 },
1727 Some(tree_sitter_rust::language()),
1728 );
1729 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1730
1731 let fs = FakeFs::new(cx.background());
1732 fs.insert_tree(
1733 "/dir",
1734 json!({
1735 "a.rs": "const fn a() { A }",
1736 "b.rs": "const y: i32 = crate::a()",
1737 }),
1738 )
1739 .await;
1740
1741 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1742 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1743
1744 let buffer = project
1745 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1746 .await
1747 .unwrap();
1748
1749 let fake_server = fake_servers.next().await.unwrap();
1750 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1751 let params = params.text_document_position_params;
1752 assert_eq!(
1753 params.text_document.uri.to_file_path().unwrap(),
1754 Path::new("/dir/b.rs"),
1755 );
1756 assert_eq!(params.position, lsp::Position::new(0, 22));
1757
1758 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1759 lsp::Location::new(
1760 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1761 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1762 ),
1763 )))
1764 });
1765
1766 let mut definitions = project
1767 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1768 .await
1769 .unwrap();
1770
1771 // Assert no new language server started
1772 cx.foreground().run_until_parked();
1773 assert!(fake_servers.try_next().is_err());
1774
1775 assert_eq!(definitions.len(), 1);
1776 let definition = definitions.pop().unwrap();
1777 cx.update(|cx| {
1778 let target_buffer = definition.target.buffer.read(cx);
1779 assert_eq!(
1780 target_buffer
1781 .file()
1782 .unwrap()
1783 .as_local()
1784 .unwrap()
1785 .abs_path(cx),
1786 Path::new("/dir/a.rs"),
1787 );
1788 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1789 assert_eq!(
1790 list_worktrees(&project, cx),
1791 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1792 );
1793
1794 drop(definition);
1795 });
1796 cx.read(|cx| {
1797 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1798 });
1799
1800 fn list_worktrees<'a>(
1801 project: &'a ModelHandle<Project>,
1802 cx: &'a AppContext,
1803 ) -> Vec<(&'a Path, bool)> {
1804 project
1805 .read(cx)
1806 .worktrees(cx)
1807 .map(|worktree| {
1808 let worktree = worktree.read(cx);
1809 (
1810 worktree.as_local().unwrap().abs_path().as_ref(),
1811 worktree.is_visible(),
1812 )
1813 })
1814 .collect::<Vec<_>>()
1815 }
1816}
1817
1818#[gpui::test]
1819async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1820 let mut language = Language::new(
1821 LanguageConfig {
1822 name: "TypeScript".into(),
1823 path_suffixes: vec!["ts".to_string()],
1824 ..Default::default()
1825 },
1826 Some(tree_sitter_typescript::language_typescript()),
1827 );
1828 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1829
1830 let fs = FakeFs::new(cx.background());
1831 fs.insert_tree(
1832 "/dir",
1833 json!({
1834 "a.ts": "",
1835 }),
1836 )
1837 .await;
1838
1839 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1840 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1841 let buffer = project
1842 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1843 .await
1844 .unwrap();
1845
1846 let fake_server = fake_language_servers.next().await.unwrap();
1847
1848 let text = "let a = b.fqn";
1849 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1850 let completions = project.update(cx, |project, cx| {
1851 project.completions(&buffer, text.len(), cx)
1852 });
1853
1854 fake_server
1855 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1856 Ok(Some(lsp::CompletionResponse::Array(vec![
1857 lsp::CompletionItem {
1858 label: "fullyQualifiedName?".into(),
1859 insert_text: Some("fullyQualifiedName".into()),
1860 ..Default::default()
1861 },
1862 ])))
1863 })
1864 .next()
1865 .await;
1866 let completions = completions.await.unwrap();
1867 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1868 assert_eq!(completions.len(), 1);
1869 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1870 assert_eq!(
1871 completions[0].old_range.to_offset(&snapshot),
1872 text.len() - 3..text.len()
1873 );
1874
1875 let text = "let a = \"atoms/cmp\"";
1876 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1877 let completions = project.update(cx, |project, cx| {
1878 project.completions(&buffer, text.len() - 1, cx)
1879 });
1880
1881 fake_server
1882 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1883 Ok(Some(lsp::CompletionResponse::Array(vec![
1884 lsp::CompletionItem {
1885 label: "component".into(),
1886 ..Default::default()
1887 },
1888 ])))
1889 })
1890 .next()
1891 .await;
1892 let completions = completions.await.unwrap();
1893 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1894 assert_eq!(completions.len(), 1);
1895 assert_eq!(completions[0].new_text, "component");
1896 assert_eq!(
1897 completions[0].old_range.to_offset(&snapshot),
1898 text.len() - 4..text.len() - 1
1899 );
1900}
1901
1902#[gpui::test]
1903async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1904 let mut language = Language::new(
1905 LanguageConfig {
1906 name: "TypeScript".into(),
1907 path_suffixes: vec!["ts".to_string()],
1908 ..Default::default()
1909 },
1910 Some(tree_sitter_typescript::language_typescript()),
1911 );
1912 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1913
1914 let fs = FakeFs::new(cx.background());
1915 fs.insert_tree(
1916 "/dir",
1917 json!({
1918 "a.ts": "",
1919 }),
1920 )
1921 .await;
1922
1923 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1924 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1925 let buffer = project
1926 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1927 .await
1928 .unwrap();
1929
1930 let fake_server = fake_language_servers.next().await.unwrap();
1931
1932 let text = "let a = b.fqn";
1933 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1934 let completions = project.update(cx, |project, cx| {
1935 project.completions(&buffer, text.len(), cx)
1936 });
1937
1938 fake_server
1939 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1940 Ok(Some(lsp::CompletionResponse::Array(vec![
1941 lsp::CompletionItem {
1942 label: "fullyQualifiedName?".into(),
1943 insert_text: Some("fully\rQualified\r\nName".into()),
1944 ..Default::default()
1945 },
1946 ])))
1947 })
1948 .next()
1949 .await;
1950 let completions = completions.await.unwrap();
1951 assert_eq!(completions.len(), 1);
1952 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1953}
1954
1955#[gpui::test(iterations = 10)]
1956async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1957 let mut language = Language::new(
1958 LanguageConfig {
1959 name: "TypeScript".into(),
1960 path_suffixes: vec!["ts".to_string()],
1961 ..Default::default()
1962 },
1963 None,
1964 );
1965 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1966
1967 let fs = FakeFs::new(cx.background());
1968 fs.insert_tree(
1969 "/dir",
1970 json!({
1971 "a.ts": "a",
1972 }),
1973 )
1974 .await;
1975
1976 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1977 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1978 let buffer = project
1979 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1980 .await
1981 .unwrap();
1982
1983 let fake_server = fake_language_servers.next().await.unwrap();
1984
1985 // Language server returns code actions that contain commands, and not edits.
1986 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1987 fake_server
1988 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1989 Ok(Some(vec![
1990 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1991 title: "The code action".into(),
1992 command: Some(lsp::Command {
1993 title: "The command".into(),
1994 command: "_the/command".into(),
1995 arguments: Some(vec![json!("the-argument")]),
1996 }),
1997 ..Default::default()
1998 }),
1999 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2000 title: "two".into(),
2001 ..Default::default()
2002 }),
2003 ]))
2004 })
2005 .next()
2006 .await;
2007
2008 let action = actions.await.unwrap()[0].clone();
2009 let apply = project.update(cx, |project, cx| {
2010 project.apply_code_action(buffer.clone(), action, true, cx)
2011 });
2012
2013 // Resolving the code action does not populate its edits. In absence of
2014 // edits, we must execute the given command.
2015 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2016 |action, _| async move { Ok(action) },
2017 );
2018
2019 // While executing the command, the language server sends the editor
2020 // a `workspaceEdit` request.
2021 fake_server
2022 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2023 let fake = fake_server.clone();
2024 move |params, _| {
2025 assert_eq!(params.command, "_the/command");
2026 let fake = fake.clone();
2027 async move {
2028 fake.server
2029 .request::<lsp::request::ApplyWorkspaceEdit>(
2030 lsp::ApplyWorkspaceEditParams {
2031 label: None,
2032 edit: lsp::WorkspaceEdit {
2033 changes: Some(
2034 [(
2035 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2036 vec![lsp::TextEdit {
2037 range: lsp::Range::new(
2038 lsp::Position::new(0, 0),
2039 lsp::Position::new(0, 0),
2040 ),
2041 new_text: "X".into(),
2042 }],
2043 )]
2044 .into_iter()
2045 .collect(),
2046 ),
2047 ..Default::default()
2048 },
2049 },
2050 )
2051 .await
2052 .unwrap();
2053 Ok(Some(json!(null)))
2054 }
2055 }
2056 })
2057 .next()
2058 .await;
2059
2060 // Applying the code action returns a project transaction containing the edits
2061 // sent by the language server in its `workspaceEdit` request.
2062 let transaction = apply.await.unwrap();
2063 assert!(transaction.0.contains_key(&buffer));
2064 buffer.update(cx, |buffer, cx| {
2065 assert_eq!(buffer.text(), "Xa");
2066 buffer.undo(cx);
2067 assert_eq!(buffer.text(), "a");
2068 });
2069}
2070
2071#[gpui::test]
2072async fn test_save_file(cx: &mut gpui::TestAppContext) {
2073 let fs = FakeFs::new(cx.background());
2074 fs.insert_tree(
2075 "/dir",
2076 json!({
2077 "file1": "the old contents",
2078 }),
2079 )
2080 .await;
2081
2082 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2083 let buffer = project
2084 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2085 .await
2086 .unwrap();
2087 buffer
2088 .update(cx, |buffer, cx| {
2089 assert_eq!(buffer.text(), "the old contents");
2090 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2091 buffer.save(cx)
2092 })
2093 .await
2094 .unwrap();
2095
2096 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2097 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2098}
2099
2100#[gpui::test]
2101async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2102 let fs = FakeFs::new(cx.background());
2103 fs.insert_tree(
2104 "/dir",
2105 json!({
2106 "file1": "the old contents",
2107 }),
2108 )
2109 .await;
2110
2111 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2112 let buffer = project
2113 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2114 .await
2115 .unwrap();
2116 buffer
2117 .update(cx, |buffer, cx| {
2118 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2119 buffer.save(cx)
2120 })
2121 .await
2122 .unwrap();
2123
2124 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2125 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2126}
2127
2128#[gpui::test]
2129async fn test_save_as(cx: &mut gpui::TestAppContext) {
2130 let fs = FakeFs::new(cx.background());
2131 fs.insert_tree("/dir", json!({})).await;
2132
2133 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2134 let buffer = project.update(cx, |project, cx| {
2135 project.create_buffer("", None, cx).unwrap()
2136 });
2137 buffer.update(cx, |buffer, cx| {
2138 buffer.edit([(0..0, "abc")], cx);
2139 assert!(buffer.is_dirty());
2140 assert!(!buffer.has_conflict());
2141 });
2142 project
2143 .update(cx, |project, cx| {
2144 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2145 })
2146 .await
2147 .unwrap();
2148 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2149 buffer.read_with(cx, |buffer, cx| {
2150 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2151 assert!(!buffer.is_dirty());
2152 assert!(!buffer.has_conflict());
2153 });
2154
2155 let opened_buffer = project
2156 .update(cx, |project, cx| {
2157 project.open_local_buffer("/dir/file1", cx)
2158 })
2159 .await
2160 .unwrap();
2161 assert_eq!(opened_buffer, buffer);
2162}
2163
2164#[gpui::test(retries = 5)]
2165async fn test_rescan_and_remote_updates(
2166 deterministic: Arc<Deterministic>,
2167 cx: &mut gpui::TestAppContext,
2168) {
2169 let dir = temp_tree(json!({
2170 "a": {
2171 "file1": "",
2172 "file2": "",
2173 "file3": "",
2174 },
2175 "b": {
2176 "c": {
2177 "file4": "",
2178 "file5": "",
2179 }
2180 }
2181 }));
2182
2183 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2184 let rpc = project.read_with(cx, |p, _| p.client.clone());
2185
2186 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2187 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2188 async move { buffer.await.unwrap() }
2189 };
2190 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2191 project.read_with(cx, |project, cx| {
2192 let tree = project.worktrees(cx).next().unwrap();
2193 tree.read(cx)
2194 .entry_for_path(path)
2195 .expect(&format!("no entry for path {}", path))
2196 .id
2197 })
2198 };
2199
2200 let buffer2 = buffer_for_path("a/file2", cx).await;
2201 let buffer3 = buffer_for_path("a/file3", cx).await;
2202 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2203 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2204
2205 let file2_id = id_for_path("a/file2", &cx);
2206 let file3_id = id_for_path("a/file3", &cx);
2207 let file4_id = id_for_path("b/c/file4", &cx);
2208
2209 // Create a remote copy of this worktree.
2210 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2211 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2212 let remote = cx.update(|cx| {
2213 Worktree::remote(
2214 1,
2215 1,
2216 proto::WorktreeMetadata {
2217 id: initial_snapshot.id().to_proto(),
2218 root_name: initial_snapshot.root_name().into(),
2219 visible: true,
2220 },
2221 rpc.clone(),
2222 cx,
2223 )
2224 });
2225 remote.update(cx, |remote, _| {
2226 let update = initial_snapshot.build_initial_update(1);
2227 remote.as_remote_mut().unwrap().update_from_remote(update);
2228 });
2229 deterministic.run_until_parked();
2230
2231 cx.read(|cx| {
2232 assert!(!buffer2.read(cx).is_dirty());
2233 assert!(!buffer3.read(cx).is_dirty());
2234 assert!(!buffer4.read(cx).is_dirty());
2235 assert!(!buffer5.read(cx).is_dirty());
2236 });
2237
2238 // Rename and delete files and directories.
2239 tree.flush_fs_events(&cx).await;
2240 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2241 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2242 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2243 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2244 tree.flush_fs_events(&cx).await;
2245
2246 let expected_paths = vec![
2247 "a",
2248 "a/file1",
2249 "a/file2.new",
2250 "b",
2251 "d",
2252 "d/file3",
2253 "d/file4",
2254 ];
2255
2256 cx.read(|app| {
2257 assert_eq!(
2258 tree.read(app)
2259 .paths()
2260 .map(|p| p.to_str().unwrap())
2261 .collect::<Vec<_>>(),
2262 expected_paths
2263 );
2264
2265 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
2266 assert_eq!(id_for_path("d/file3", &cx), file3_id);
2267 assert_eq!(id_for_path("d/file4", &cx), file4_id);
2268
2269 assert_eq!(
2270 buffer2.read(app).file().unwrap().path().as_ref(),
2271 Path::new("a/file2.new")
2272 );
2273 assert_eq!(
2274 buffer3.read(app).file().unwrap().path().as_ref(),
2275 Path::new("d/file3")
2276 );
2277 assert_eq!(
2278 buffer4.read(app).file().unwrap().path().as_ref(),
2279 Path::new("d/file4")
2280 );
2281 assert_eq!(
2282 buffer5.read(app).file().unwrap().path().as_ref(),
2283 Path::new("b/c/file5")
2284 );
2285
2286 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2287 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2288 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2289 assert!(buffer5.read(app).file().unwrap().is_deleted());
2290 });
2291
2292 // Update the remote worktree. Check that it becomes consistent with the
2293 // local worktree.
2294 remote.update(cx, |remote, cx| {
2295 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2296 &initial_snapshot,
2297 1,
2298 1,
2299 true,
2300 );
2301 remote.as_remote_mut().unwrap().update_from_remote(update);
2302 });
2303 deterministic.run_until_parked();
2304 remote.read_with(cx, |remote, _| {
2305 assert_eq!(
2306 remote
2307 .paths()
2308 .map(|p| p.to_str().unwrap())
2309 .collect::<Vec<_>>(),
2310 expected_paths
2311 );
2312 });
2313}
2314
2315#[gpui::test]
2316async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2317 let fs = FakeFs::new(cx.background());
2318 fs.insert_tree(
2319 "/dir",
2320 json!({
2321 "a.txt": "a-contents",
2322 "b.txt": "b-contents",
2323 }),
2324 )
2325 .await;
2326
2327 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2328
2329 // Spawn multiple tasks to open paths, repeating some paths.
2330 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2331 (
2332 p.open_local_buffer("/dir/a.txt", cx),
2333 p.open_local_buffer("/dir/b.txt", cx),
2334 p.open_local_buffer("/dir/a.txt", cx),
2335 )
2336 });
2337
2338 let buffer_a_1 = buffer_a_1.await.unwrap();
2339 let buffer_a_2 = buffer_a_2.await.unwrap();
2340 let buffer_b = buffer_b.await.unwrap();
2341 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2342 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2343
2344 // There is only one buffer per path.
2345 let buffer_a_id = buffer_a_1.id();
2346 assert_eq!(buffer_a_2.id(), buffer_a_id);
2347
2348 // Open the same path again while it is still open.
2349 drop(buffer_a_1);
2350 let buffer_a_3 = project
2351 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2352 .await
2353 .unwrap();
2354
2355 // There's still only one buffer per path.
2356 assert_eq!(buffer_a_3.id(), buffer_a_id);
2357}
2358
2359#[gpui::test]
2360async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2361 let fs = FakeFs::new(cx.background());
2362 fs.insert_tree(
2363 "/dir",
2364 json!({
2365 "file1": "abc",
2366 "file2": "def",
2367 "file3": "ghi",
2368 }),
2369 )
2370 .await;
2371
2372 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2373
2374 let buffer1 = project
2375 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2376 .await
2377 .unwrap();
2378 let events = Rc::new(RefCell::new(Vec::new()));
2379
2380 // initially, the buffer isn't dirty.
2381 buffer1.update(cx, |buffer, cx| {
2382 cx.subscribe(&buffer1, {
2383 let events = events.clone();
2384 move |_, _, event, _| match event {
2385 BufferEvent::Operation(_) => {}
2386 _ => events.borrow_mut().push(event.clone()),
2387 }
2388 })
2389 .detach();
2390
2391 assert!(!buffer.is_dirty());
2392 assert!(events.borrow().is_empty());
2393
2394 buffer.edit([(1..2, "")], cx);
2395 });
2396
2397 // after the first edit, the buffer is dirty, and emits a dirtied event.
2398 buffer1.update(cx, |buffer, cx| {
2399 assert!(buffer.text() == "ac");
2400 assert!(buffer.is_dirty());
2401 assert_eq!(
2402 *events.borrow(),
2403 &[language::Event::Edited, language::Event::DirtyChanged]
2404 );
2405 events.borrow_mut().clear();
2406 buffer.did_save(
2407 buffer.version(),
2408 buffer.as_rope().fingerprint(),
2409 buffer.file().unwrap().mtime(),
2410 None,
2411 cx,
2412 );
2413 });
2414
2415 // after saving, the buffer is not dirty, and emits a saved event.
2416 buffer1.update(cx, |buffer, cx| {
2417 assert!(!buffer.is_dirty());
2418 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2419 events.borrow_mut().clear();
2420
2421 buffer.edit([(1..1, "B")], cx);
2422 buffer.edit([(2..2, "D")], cx);
2423 });
2424
2425 // after editing again, the buffer is dirty, and emits another dirty event.
2426 buffer1.update(cx, |buffer, cx| {
2427 assert!(buffer.text() == "aBDc");
2428 assert!(buffer.is_dirty());
2429 assert_eq!(
2430 *events.borrow(),
2431 &[
2432 language::Event::Edited,
2433 language::Event::DirtyChanged,
2434 language::Event::Edited,
2435 ],
2436 );
2437 events.borrow_mut().clear();
2438
2439 // After restoring the buffer to its previously-saved state,
2440 // the buffer is not considered dirty anymore.
2441 buffer.edit([(1..3, "")], cx);
2442 assert!(buffer.text() == "ac");
2443 assert!(!buffer.is_dirty());
2444 });
2445
2446 assert_eq!(
2447 *events.borrow(),
2448 &[language::Event::Edited, language::Event::DirtyChanged]
2449 );
2450
2451 // When a file is deleted, the buffer is considered dirty.
2452 let events = Rc::new(RefCell::new(Vec::new()));
2453 let buffer2 = project
2454 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2455 .await
2456 .unwrap();
2457 buffer2.update(cx, |_, cx| {
2458 cx.subscribe(&buffer2, {
2459 let events = events.clone();
2460 move |_, _, event, _| events.borrow_mut().push(event.clone())
2461 })
2462 .detach();
2463 });
2464
2465 fs.remove_file("/dir/file2".as_ref(), Default::default())
2466 .await
2467 .unwrap();
2468 cx.foreground().run_until_parked();
2469 assert_eq!(
2470 *events.borrow(),
2471 &[
2472 language::Event::DirtyChanged,
2473 language::Event::FileHandleChanged
2474 ]
2475 );
2476
2477 // When a file is already dirty when deleted, we don't emit a Dirtied event.
2478 let events = Rc::new(RefCell::new(Vec::new()));
2479 let buffer3 = project
2480 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2481 .await
2482 .unwrap();
2483 buffer3.update(cx, |_, cx| {
2484 cx.subscribe(&buffer3, {
2485 let events = events.clone();
2486 move |_, _, event, _| events.borrow_mut().push(event.clone())
2487 })
2488 .detach();
2489 });
2490
2491 buffer3.update(cx, |buffer, cx| {
2492 buffer.edit([(0..0, "x")], cx);
2493 });
2494 events.borrow_mut().clear();
2495 fs.remove_file("/dir/file3".as_ref(), Default::default())
2496 .await
2497 .unwrap();
2498 cx.foreground().run_until_parked();
2499 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2500 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2501}
2502
2503#[gpui::test]
2504async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2505 let initial_contents = "aaa\nbbbbb\nc\n";
2506 let fs = FakeFs::new(cx.background());
2507 fs.insert_tree(
2508 "/dir",
2509 json!({
2510 "the-file": initial_contents,
2511 }),
2512 )
2513 .await;
2514 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2515 let buffer = project
2516 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2517 .await
2518 .unwrap();
2519
2520 let anchors = (0..3)
2521 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2522 .collect::<Vec<_>>();
2523
2524 // Change the file on disk, adding two new lines of text, and removing
2525 // one line.
2526 buffer.read_with(cx, |buffer, _| {
2527 assert!(!buffer.is_dirty());
2528 assert!(!buffer.has_conflict());
2529 });
2530 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2531 fs.save(
2532 "/dir/the-file".as_ref(),
2533 &new_contents.into(),
2534 LineEnding::Unix,
2535 )
2536 .await
2537 .unwrap();
2538
2539 // Because the buffer was not modified, it is reloaded from disk. Its
2540 // contents are edited according to the diff between the old and new
2541 // file contents.
2542 cx.foreground().run_until_parked();
2543 buffer.update(cx, |buffer, _| {
2544 assert_eq!(buffer.text(), new_contents);
2545 assert!(!buffer.is_dirty());
2546 assert!(!buffer.has_conflict());
2547
2548 let anchor_positions = anchors
2549 .iter()
2550 .map(|anchor| anchor.to_point(&*buffer))
2551 .collect::<Vec<_>>();
2552 assert_eq!(
2553 anchor_positions,
2554 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
2555 );
2556 });
2557
2558 // Modify the buffer
2559 buffer.update(cx, |buffer, cx| {
2560 buffer.edit([(0..0, " ")], cx);
2561 assert!(buffer.is_dirty());
2562 assert!(!buffer.has_conflict());
2563 });
2564
2565 // Change the file on disk again, adding blank lines to the beginning.
2566 fs.save(
2567 "/dir/the-file".as_ref(),
2568 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2569 LineEnding::Unix,
2570 )
2571 .await
2572 .unwrap();
2573
2574 // Because the buffer is modified, it doesn't reload from disk, but is
2575 // marked as having a conflict.
2576 cx.foreground().run_until_parked();
2577 buffer.read_with(cx, |buffer, _| {
2578 assert!(buffer.has_conflict());
2579 });
2580}
2581
2582#[gpui::test]
2583async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2584 let fs = FakeFs::new(cx.background());
2585 fs.insert_tree(
2586 "/dir",
2587 json!({
2588 "file1": "a\nb\nc\n",
2589 "file2": "one\r\ntwo\r\nthree\r\n",
2590 }),
2591 )
2592 .await;
2593
2594 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2595 let buffer1 = project
2596 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2597 .await
2598 .unwrap();
2599 let buffer2 = project
2600 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2601 .await
2602 .unwrap();
2603
2604 buffer1.read_with(cx, |buffer, _| {
2605 assert_eq!(buffer.text(), "a\nb\nc\n");
2606 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2607 });
2608 buffer2.read_with(cx, |buffer, _| {
2609 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2610 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2611 });
2612
2613 // Change a file's line endings on disk from unix to windows. The buffer's
2614 // state updates correctly.
2615 fs.save(
2616 "/dir/file1".as_ref(),
2617 &"aaa\nb\nc\n".into(),
2618 LineEnding::Windows,
2619 )
2620 .await
2621 .unwrap();
2622 cx.foreground().run_until_parked();
2623 buffer1.read_with(cx, |buffer, _| {
2624 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2625 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2626 });
2627
2628 // Save a file with windows line endings. The file is written correctly.
2629 buffer2
2630 .update(cx, |buffer, cx| {
2631 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2632 buffer.save(cx)
2633 })
2634 .await
2635 .unwrap();
2636 assert_eq!(
2637 fs.load("/dir/file2".as_ref()).await.unwrap(),
2638 "one\r\ntwo\r\nthree\r\nfour\r\n",
2639 );
2640}
2641
2642#[gpui::test]
2643async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2644 cx.foreground().forbid_parking();
2645
2646 let fs = FakeFs::new(cx.background());
2647 fs.insert_tree(
2648 "/the-dir",
2649 json!({
2650 "a.rs": "
2651 fn foo(mut v: Vec<usize>) {
2652 for x in &v {
2653 v.push(1);
2654 }
2655 }
2656 "
2657 .unindent(),
2658 }),
2659 )
2660 .await;
2661
2662 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2663 let buffer = project
2664 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2665 .await
2666 .unwrap();
2667
2668 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2669 let message = lsp::PublishDiagnosticsParams {
2670 uri: buffer_uri.clone(),
2671 diagnostics: vec![
2672 lsp::Diagnostic {
2673 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2674 severity: Some(DiagnosticSeverity::WARNING),
2675 message: "error 1".to_string(),
2676 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2677 location: lsp::Location {
2678 uri: buffer_uri.clone(),
2679 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2680 },
2681 message: "error 1 hint 1".to_string(),
2682 }]),
2683 ..Default::default()
2684 },
2685 lsp::Diagnostic {
2686 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2687 severity: Some(DiagnosticSeverity::HINT),
2688 message: "error 1 hint 1".to_string(),
2689 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2690 location: lsp::Location {
2691 uri: buffer_uri.clone(),
2692 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2693 },
2694 message: "original diagnostic".to_string(),
2695 }]),
2696 ..Default::default()
2697 },
2698 lsp::Diagnostic {
2699 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2700 severity: Some(DiagnosticSeverity::ERROR),
2701 message: "error 2".to_string(),
2702 related_information: Some(vec![
2703 lsp::DiagnosticRelatedInformation {
2704 location: lsp::Location {
2705 uri: buffer_uri.clone(),
2706 range: lsp::Range::new(
2707 lsp::Position::new(1, 13),
2708 lsp::Position::new(1, 15),
2709 ),
2710 },
2711 message: "error 2 hint 1".to_string(),
2712 },
2713 lsp::DiagnosticRelatedInformation {
2714 location: lsp::Location {
2715 uri: buffer_uri.clone(),
2716 range: lsp::Range::new(
2717 lsp::Position::new(1, 13),
2718 lsp::Position::new(1, 15),
2719 ),
2720 },
2721 message: "error 2 hint 2".to_string(),
2722 },
2723 ]),
2724 ..Default::default()
2725 },
2726 lsp::Diagnostic {
2727 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2728 severity: Some(DiagnosticSeverity::HINT),
2729 message: "error 2 hint 1".to_string(),
2730 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2731 location: lsp::Location {
2732 uri: buffer_uri.clone(),
2733 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2734 },
2735 message: "original diagnostic".to_string(),
2736 }]),
2737 ..Default::default()
2738 },
2739 lsp::Diagnostic {
2740 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2741 severity: Some(DiagnosticSeverity::HINT),
2742 message: "error 2 hint 2".to_string(),
2743 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2744 location: lsp::Location {
2745 uri: buffer_uri.clone(),
2746 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2747 },
2748 message: "original diagnostic".to_string(),
2749 }]),
2750 ..Default::default()
2751 },
2752 ],
2753 version: None,
2754 };
2755
2756 project
2757 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2758 .unwrap();
2759 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2760
2761 assert_eq!(
2762 buffer
2763 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2764 .collect::<Vec<_>>(),
2765 &[
2766 DiagnosticEntry {
2767 range: Point::new(1, 8)..Point::new(1, 9),
2768 diagnostic: Diagnostic {
2769 severity: DiagnosticSeverity::WARNING,
2770 message: "error 1".to_string(),
2771 group_id: 0,
2772 is_primary: true,
2773 ..Default::default()
2774 }
2775 },
2776 DiagnosticEntry {
2777 range: Point::new(1, 8)..Point::new(1, 9),
2778 diagnostic: Diagnostic {
2779 severity: DiagnosticSeverity::HINT,
2780 message: "error 1 hint 1".to_string(),
2781 group_id: 0,
2782 is_primary: false,
2783 ..Default::default()
2784 }
2785 },
2786 DiagnosticEntry {
2787 range: Point::new(1, 13)..Point::new(1, 15),
2788 diagnostic: Diagnostic {
2789 severity: DiagnosticSeverity::HINT,
2790 message: "error 2 hint 1".to_string(),
2791 group_id: 1,
2792 is_primary: false,
2793 ..Default::default()
2794 }
2795 },
2796 DiagnosticEntry {
2797 range: Point::new(1, 13)..Point::new(1, 15),
2798 diagnostic: Diagnostic {
2799 severity: DiagnosticSeverity::HINT,
2800 message: "error 2 hint 2".to_string(),
2801 group_id: 1,
2802 is_primary: false,
2803 ..Default::default()
2804 }
2805 },
2806 DiagnosticEntry {
2807 range: Point::new(2, 8)..Point::new(2, 17),
2808 diagnostic: Diagnostic {
2809 severity: DiagnosticSeverity::ERROR,
2810 message: "error 2".to_string(),
2811 group_id: 1,
2812 is_primary: true,
2813 ..Default::default()
2814 }
2815 }
2816 ]
2817 );
2818
2819 assert_eq!(
2820 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2821 &[
2822 DiagnosticEntry {
2823 range: Point::new(1, 8)..Point::new(1, 9),
2824 diagnostic: Diagnostic {
2825 severity: DiagnosticSeverity::WARNING,
2826 message: "error 1".to_string(),
2827 group_id: 0,
2828 is_primary: true,
2829 ..Default::default()
2830 }
2831 },
2832 DiagnosticEntry {
2833 range: Point::new(1, 8)..Point::new(1, 9),
2834 diagnostic: Diagnostic {
2835 severity: DiagnosticSeverity::HINT,
2836 message: "error 1 hint 1".to_string(),
2837 group_id: 0,
2838 is_primary: false,
2839 ..Default::default()
2840 }
2841 },
2842 ]
2843 );
2844 assert_eq!(
2845 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2846 &[
2847 DiagnosticEntry {
2848 range: Point::new(1, 13)..Point::new(1, 15),
2849 diagnostic: Diagnostic {
2850 severity: DiagnosticSeverity::HINT,
2851 message: "error 2 hint 1".to_string(),
2852 group_id: 1,
2853 is_primary: false,
2854 ..Default::default()
2855 }
2856 },
2857 DiagnosticEntry {
2858 range: Point::new(1, 13)..Point::new(1, 15),
2859 diagnostic: Diagnostic {
2860 severity: DiagnosticSeverity::HINT,
2861 message: "error 2 hint 2".to_string(),
2862 group_id: 1,
2863 is_primary: false,
2864 ..Default::default()
2865 }
2866 },
2867 DiagnosticEntry {
2868 range: Point::new(2, 8)..Point::new(2, 17),
2869 diagnostic: Diagnostic {
2870 severity: DiagnosticSeverity::ERROR,
2871 message: "error 2".to_string(),
2872 group_id: 1,
2873 is_primary: true,
2874 ..Default::default()
2875 }
2876 }
2877 ]
2878 );
2879}
2880
2881#[gpui::test]
2882async fn test_rename(cx: &mut gpui::TestAppContext) {
2883 cx.foreground().forbid_parking();
2884
2885 let mut language = Language::new(
2886 LanguageConfig {
2887 name: "Rust".into(),
2888 path_suffixes: vec!["rs".to_string()],
2889 ..Default::default()
2890 },
2891 Some(tree_sitter_rust::language()),
2892 );
2893 let mut fake_servers = language
2894 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2895 capabilities: lsp::ServerCapabilities {
2896 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2897 prepare_provider: Some(true),
2898 work_done_progress_options: Default::default(),
2899 })),
2900 ..Default::default()
2901 },
2902 ..Default::default()
2903 }))
2904 .await;
2905
2906 let fs = FakeFs::new(cx.background());
2907 fs.insert_tree(
2908 "/dir",
2909 json!({
2910 "one.rs": "const ONE: usize = 1;",
2911 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2912 }),
2913 )
2914 .await;
2915
2916 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2917 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2918 let buffer = project
2919 .update(cx, |project, cx| {
2920 project.open_local_buffer("/dir/one.rs", cx)
2921 })
2922 .await
2923 .unwrap();
2924
2925 let fake_server = fake_servers.next().await.unwrap();
2926
2927 let response = project.update(cx, |project, cx| {
2928 project.prepare_rename(buffer.clone(), 7, cx)
2929 });
2930 fake_server
2931 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2932 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2933 assert_eq!(params.position, lsp::Position::new(0, 7));
2934 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2935 lsp::Position::new(0, 6),
2936 lsp::Position::new(0, 9),
2937 ))))
2938 })
2939 .next()
2940 .await
2941 .unwrap();
2942 let range = response.await.unwrap().unwrap();
2943 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2944 assert_eq!(range, 6..9);
2945
2946 let response = project.update(cx, |project, cx| {
2947 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2948 });
2949 fake_server
2950 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2951 assert_eq!(
2952 params.text_document_position.text_document.uri.as_str(),
2953 "file:///dir/one.rs"
2954 );
2955 assert_eq!(
2956 params.text_document_position.position,
2957 lsp::Position::new(0, 7)
2958 );
2959 assert_eq!(params.new_name, "THREE");
2960 Ok(Some(lsp::WorkspaceEdit {
2961 changes: Some(
2962 [
2963 (
2964 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2965 vec![lsp::TextEdit::new(
2966 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2967 "THREE".to_string(),
2968 )],
2969 ),
2970 (
2971 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2972 vec![
2973 lsp::TextEdit::new(
2974 lsp::Range::new(
2975 lsp::Position::new(0, 24),
2976 lsp::Position::new(0, 27),
2977 ),
2978 "THREE".to_string(),
2979 ),
2980 lsp::TextEdit::new(
2981 lsp::Range::new(
2982 lsp::Position::new(0, 35),
2983 lsp::Position::new(0, 38),
2984 ),
2985 "THREE".to_string(),
2986 ),
2987 ],
2988 ),
2989 ]
2990 .into_iter()
2991 .collect(),
2992 ),
2993 ..Default::default()
2994 }))
2995 })
2996 .next()
2997 .await
2998 .unwrap();
2999 let mut transaction = response.await.unwrap().0;
3000 assert_eq!(transaction.len(), 2);
3001 assert_eq!(
3002 transaction
3003 .remove_entry(&buffer)
3004 .unwrap()
3005 .0
3006 .read_with(cx, |buffer, _| buffer.text()),
3007 "const THREE: usize = 1;"
3008 );
3009 assert_eq!(
3010 transaction
3011 .into_keys()
3012 .next()
3013 .unwrap()
3014 .read_with(cx, |buffer, _| buffer.text()),
3015 "const TWO: usize = one::THREE + one::THREE;"
3016 );
3017}
3018
3019#[gpui::test]
3020async fn test_search(cx: &mut gpui::TestAppContext) {
3021 let fs = FakeFs::new(cx.background());
3022 fs.insert_tree(
3023 "/dir",
3024 json!({
3025 "one.rs": "const ONE: usize = 1;",
3026 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3027 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3028 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3029 }),
3030 )
3031 .await;
3032 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3033 assert_eq!(
3034 search(&project, SearchQuery::text("TWO", false, true), cx)
3035 .await
3036 .unwrap(),
3037 HashMap::from_iter([
3038 ("two.rs".to_string(), vec![6..9]),
3039 ("three.rs".to_string(), vec![37..40])
3040 ])
3041 );
3042
3043 let buffer_4 = project
3044 .update(cx, |project, cx| {
3045 project.open_local_buffer("/dir/four.rs", cx)
3046 })
3047 .await
3048 .unwrap();
3049 buffer_4.update(cx, |buffer, cx| {
3050 let text = "two::TWO";
3051 buffer.edit([(20..28, text), (31..43, text)], cx);
3052 });
3053
3054 assert_eq!(
3055 search(&project, SearchQuery::text("TWO", false, true), cx)
3056 .await
3057 .unwrap(),
3058 HashMap::from_iter([
3059 ("two.rs".to_string(), vec![6..9]),
3060 ("three.rs".to_string(), vec![37..40]),
3061 ("four.rs".to_string(), vec![25..28, 36..39])
3062 ])
3063 );
3064
3065 async fn search(
3066 project: &ModelHandle<Project>,
3067 query: SearchQuery,
3068 cx: &mut gpui::TestAppContext,
3069 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3070 let results = project
3071 .update(cx, |project, cx| project.search(query, cx))
3072 .await?;
3073
3074 Ok(results
3075 .into_iter()
3076 .map(|(buffer, ranges)| {
3077 buffer.read_with(cx, |buffer, _| {
3078 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3079 let ranges = ranges
3080 .into_iter()
3081 .map(|range| range.to_offset(buffer))
3082 .collect::<Vec<_>>();
3083 (path, ranges)
3084 })
3085 })
3086 .collect())
3087 }
3088}