use crate::{worktree::WorktreeHandle, Event, *};
use fs::RealFs;
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe};
use language::{
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use serde_json::json;
use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

#[gpui::test]
async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });

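    // Fuzzy-match paths in the worktree: the query "bna" should only match the files under the "banana" directory.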
    let cancel_flag = Default::default();
    let results = project
        .read_with(cx, |project, cx| {
            project.match_paths("bna", false, false, 10, &cancel_flag, cx)
        })
        .await;
    assert_eq!(
        results
            .into_iter()
            .map(|result| result.path)
            .collect::<Vec<Arc<Path>>>(),
        vec![
            PathBuf::from("banana/carrot/date").into(),
            PathBuf::from("banana/carrot/endive").into(),
        ]
    );
}

#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    cx.foreground().forbid_parking();

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A JSON language server is started up and is only notified about the JSON buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    toml_buffer
        .update(cx, |buffer, cx| buffer.save(cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

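    // Attach a placeholder diagnostic to the buffer so we can verify below that it is cleared when the buffer's language changes.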
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause the worktree to start the fake language server.
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

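    // Subscribe to project events so that the order of diagnostic events can be asserted below.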
    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

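    // Open the file with the published diagnostics and verify that they were recorded in the buffer.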
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
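    // Subscribe to project events; only events emitted after this point will be observed.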
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [0; 0]
        );
    });
}

#[gpui::test]
async fn test_toggling_enable_language_server(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    deterministic.forbid_parking();

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

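    // Both language servers start up and receive an open notification for their respective files.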
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(true),
                    ..Default::default()
                },
            );
            settings.language_overrides.insert(
                Arc::from("JavaScript"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
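    // Capture the document version reported when the buffer was opened, so that edits
    // can later be applied against that older version of the buffer.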
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

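// Collects the buffer's chunks in the given range, merging adjacent chunks that
// share the same diagnostic severity.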
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}

#[gpui::test]
async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "dir1": {},
            "dir2": {
                "dir3": {}
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let cancel_flag = Default::default();
    let results = project
        .read_with(cx, |project, cx| {
            project.match_paths("dir", false, false, 10, &cancel_flag, cx)
        })
        .await;

    assert!(results.is_empty());
}

#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
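    // Respond to the go-to-definition request with a location in a file that is not
    // yet part of the project's worktrees.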
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

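    // The completion item provides no edit range, so the replacement range should be
    // inferred from the word preceding the cursor ("fqn").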
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}

#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

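    // The completion's insert text contains carriage returns, which should be
    // normalized to newlines in the resulting text.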
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}

1933#[gpui::test(iterations = 10)]
1934async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1935 let mut language = Language::new(
1936 LanguageConfig {
1937 name: "TypeScript".into(),
1938 path_suffixes: vec!["ts".to_string()],
1939 ..Default::default()
1940 },
1941 None,
1942 );
1943 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1944
1945 let fs = FakeFs::new(cx.background());
1946 fs.insert_tree(
1947 "/dir",
1948 json!({
1949 "a.ts": "a",
1950 }),
1951 )
1952 .await;
1953
1954 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1955 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1956 let buffer = project
1957 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1958 .await
1959 .unwrap();
1960
1961 let fake_server = fake_language_servers.next().await.unwrap();
1962
1963 // Language server returns code actions that contain commands, and not edits.
1964 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1965 fake_server
1966 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1967 Ok(Some(vec![
1968 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1969 title: "The code action".into(),
1970 command: Some(lsp::Command {
1971 title: "The command".into(),
1972 command: "_the/command".into(),
1973 arguments: Some(vec![json!("the-argument")]),
1974 }),
1975 ..Default::default()
1976 }),
1977 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1978 title: "two".into(),
1979 ..Default::default()
1980 }),
1981 ]))
1982 })
1983 .next()
1984 .await;
1985
1986 let action = actions.await.unwrap()[0].clone();
1987 let apply = project.update(cx, |project, cx| {
1988 project.apply_code_action(buffer.clone(), action, true, cx)
1989 });
1990
1991 // Resolving the code action does not populate its edits. In absence of
1992 // edits, we must execute the given command.
1993 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
1994 |action, _| async move { Ok(action) },
1995 );
1996
1997 // While executing the command, the language server sends the editor
1998 // a `workspaceEdit` request.
1999 fake_server
2000 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2001 let fake = fake_server.clone();
2002 move |params, _| {
2003 assert_eq!(params.command, "_the/command");
2004 let fake = fake.clone();
2005 async move {
2006 fake.server
2007 .request::<lsp::request::ApplyWorkspaceEdit>(
2008 lsp::ApplyWorkspaceEditParams {
2009 label: None,
2010 edit: lsp::WorkspaceEdit {
2011 changes: Some(
2012 [(
2013 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2014 vec![lsp::TextEdit {
2015 range: lsp::Range::new(
2016 lsp::Position::new(0, 0),
2017 lsp::Position::new(0, 0),
2018 ),
2019 new_text: "X".into(),
2020 }],
2021 )]
2022 .into_iter()
2023 .collect(),
2024 ),
2025 ..Default::default()
2026 },
2027 },
2028 )
2029 .await
2030 .unwrap();
2031 Ok(Some(json!(null)))
2032 }
2033 }
2034 })
2035 .next()
2036 .await;
2037
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2040 let transaction = apply.await.unwrap();
2041 assert!(transaction.0.contains_key(&buffer));
2042 buffer.update(cx, |buffer, cx| {
2043 assert_eq!(buffer.text(), "Xa");
2044 buffer.undo(cx);
2045 assert_eq!(buffer.text(), "a");
2046 });
2047}
2048
2049#[gpui::test]
2050async fn test_save_file(cx: &mut gpui::TestAppContext) {
2051 let fs = FakeFs::new(cx.background());
2052 fs.insert_tree(
2053 "/dir",
2054 json!({
2055 "file1": "the old contents",
2056 }),
2057 )
2058 .await;
2059
2060 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2061 let buffer = project
2062 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2063 .await
2064 .unwrap();
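    // Edit the buffer so that it is much larger than the original file, then save it.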
2065 buffer
2066 .update(cx, |buffer, cx| {
2067 assert_eq!(buffer.text(), "the old contents");
2068 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2069 buffer.save(cx)
2070 })
2071 .await
2072 .unwrap();
2073
2074 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2075 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2076}
2077
2078#[gpui::test]
2079async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2080 let fs = FakeFs::new(cx.background());
2081 fs.insert_tree(
2082 "/dir",
2083 json!({
2084 "file1": "the old contents",
2085 }),
2086 )
2087 .await;
2088
2089 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
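    // The worktree is rooted at the file itself rather than at a containing directory.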
2090 let buffer = project
2091 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2092 .await
2093 .unwrap();
2094 buffer
2095 .update(cx, |buffer, cx| {
2096 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2097 buffer.save(cx)
2098 })
2099 .await
2100 .unwrap();
2101
2102 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2103 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2104}
2105
2106#[gpui::test]
2107async fn test_save_as(cx: &mut gpui::TestAppContext) {
2108 let fs = FakeFs::new(cx.background());
2109 fs.insert_tree("/dir", json!({})).await;
2110
2111 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2112 let buffer = project.update(cx, |project, cx| {
2113 project.create_buffer("", None, cx).unwrap()
2114 });
2115 buffer.update(cx, |buffer, cx| {
2116 buffer.edit([(0..0, "abc")], cx);
2117 assert!(buffer.is_dirty());
2118 assert!(!buffer.has_conflict());
2119 });
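    // Save the untitled buffer under a new path, writing its contents to disk.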
2120 project
2121 .update(cx, |project, cx| {
2122 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2123 })
2124 .await
2125 .unwrap();
2126 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
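    // The buffer is now clean and associated with the newly created file.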
2127 buffer.read_with(cx, |buffer, cx| {
2128 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2129 assert!(!buffer.is_dirty());
2130 assert!(!buffer.has_conflict());
2131 });
2132
2133 let opened_buffer = project
2134 .update(cx, |project, cx| {
2135 project.open_local_buffer("/dir/file1", cx)
2136 })
2137 .await
2138 .unwrap();
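    // Opening the saved path again returns the existing buffer instead of creating a new one.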
2139 assert_eq!(opened_buffer, buffer);
2140}
2141
2142#[gpui::test(retries = 5)]
2143async fn test_rescan_and_remote_updates(
2144 deterministic: Arc<Deterministic>,
2145 cx: &mut gpui::TestAppContext,
2146) {
2147 let dir = temp_tree(json!({
2148 "a": {
2149 "file1": "",
2150 "file2": "",
2151 "file3": "",
2152 },
2153 "b": {
2154 "c": {
2155 "file4": "",
2156 "file5": "",
2157 }
2158 }
2159 }));
2160
2161 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2162 let rpc = project.read_with(cx, |p, _| p.client.clone());
2163
2164 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2165 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2166 async move { buffer.await.unwrap() }
2167 };
2168 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2169 project.read_with(cx, |project, cx| {
2170 let tree = project.worktrees(cx).next().unwrap();
2171 tree.read(cx)
2172 .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
2174 .id
2175 })
2176 };
2177
2178 let buffer2 = buffer_for_path("a/file2", cx).await;
2179 let buffer3 = buffer_for_path("a/file3", cx).await;
2180 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2181 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2182
2183 let file2_id = id_for_path("a/file2", &cx);
2184 let file3_id = id_for_path("a/file3", &cx);
2185 let file4_id = id_for_path("b/c/file4", &cx);
2186
2187 // Create a remote copy of this worktree.
2188 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2189 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2190 let remote = cx.update(|cx| {
2191 Worktree::remote(
2192 1,
2193 1,
2194 proto::WorktreeMetadata {
2195 id: initial_snapshot.id().to_proto(),
2196 root_name: initial_snapshot.root_name().into(),
2197 visible: true,
2198 },
2199 rpc.clone(),
2200 cx,
2201 )
2202 });
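    // Seed the remote worktree with an initial snapshot of the local worktree.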
2203 remote.update(cx, |remote, _| {
2204 let update = initial_snapshot.build_initial_update(1);
2205 remote.as_remote_mut().unwrap().update_from_remote(update);
2206 });
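    // Wait for the remote worktree to process the initial update.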
2207 deterministic.run_until_parked();
2208
2209 cx.read(|cx| {
2210 assert!(!buffer2.read(cx).is_dirty());
2211 assert!(!buffer3.read(cx).is_dirty());
2212 assert!(!buffer4.read(cx).is_dirty());
2213 assert!(!buffer5.read(cx).is_dirty());
2214 });
2215
2216 // Rename and delete files and directories.
2217 tree.flush_fs_events(&cx).await;
2218 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2219 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2220 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2221 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2222 tree.flush_fs_events(&cx).await;
2223
2224 let expected_paths = vec![
2225 "a",
2226 "a/file1",
2227 "a/file2.new",
2228 "b",
2229 "d",
2230 "d/file3",
2231 "d/file4",
2232 ];
2233
2234 cx.read(|app| {
2235 assert_eq!(
2236 tree.read(app)
2237 .paths()
2238 .map(|p| p.to_str().unwrap())
2239 .collect::<Vec<_>>(),
2240 expected_paths
2241 );
2242
2243 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
2244 assert_eq!(id_for_path("d/file3", &cx), file3_id);
2245 assert_eq!(id_for_path("d/file4", &cx), file4_id);
2246
2247 assert_eq!(
2248 buffer2.read(app).file().unwrap().path().as_ref(),
2249 Path::new("a/file2.new")
2250 );
2251 assert_eq!(
2252 buffer3.read(app).file().unwrap().path().as_ref(),
2253 Path::new("d/file3")
2254 );
2255 assert_eq!(
2256 buffer4.read(app).file().unwrap().path().as_ref(),
2257 Path::new("d/file4")
2258 );
2259 assert_eq!(
2260 buffer5.read(app).file().unwrap().path().as_ref(),
2261 Path::new("b/c/file5")
2262 );
2263
2264 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2265 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2266 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2267 assert!(buffer5.read(app).file().unwrap().is_deleted());
2268 });
2269
2270 // Update the remote worktree. Check that it becomes consistent with the
2271 // local worktree.
2272 remote.update(cx, |remote, cx| {
2273 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2274 &initial_snapshot,
2275 1,
2276 1,
2277 true,
2278 );
2279 remote.as_remote_mut().unwrap().update_from_remote(update);
2280 });
2281 deterministic.run_until_parked();
2282 remote.read_with(cx, |remote, _| {
2283 assert_eq!(
2284 remote
2285 .paths()
2286 .map(|p| p.to_str().unwrap())
2287 .collect::<Vec<_>>(),
2288 expected_paths
2289 );
2290 });
2291}
2292
2293#[gpui::test]
2294async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2295 let fs = FakeFs::new(cx.background());
2296 fs.insert_tree(
2297 "/dir",
2298 json!({
2299 "a.txt": "a-contents",
2300 "b.txt": "b-contents",
2301 }),
2302 )
2303 .await;
2304
2305 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2306
2307 // Spawn multiple tasks to open paths, repeating some paths.
2308 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2309 (
2310 p.open_local_buffer("/dir/a.txt", cx),
2311 p.open_local_buffer("/dir/b.txt", cx),
2312 p.open_local_buffer("/dir/a.txt", cx),
2313 )
2314 });
2315
2316 let buffer_a_1 = buffer_a_1.await.unwrap();
2317 let buffer_a_2 = buffer_a_2.await.unwrap();
2318 let buffer_b = buffer_b.await.unwrap();
2319 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2320 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2321
2322 // There is only one buffer per path.
2323 let buffer_a_id = buffer_a_1.id();
2324 assert_eq!(buffer_a_2.id(), buffer_a_id);
2325
    // Drop one of the handles, then open the same path again while the buffer is still open.
2327 drop(buffer_a_1);
2328 let buffer_a_3 = project
2329 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2330 .await
2331 .unwrap();
2332
2333 // There's still only one buffer per path.
2334 assert_eq!(buffer_a_3.id(), buffer_a_id);
2335}
2336
2337#[gpui::test]
2338async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2339 let fs = FakeFs::new(cx.background());
2340 fs.insert_tree(
2341 "/dir",
2342 json!({
2343 "file1": "abc",
2344 "file2": "def",
2345 "file3": "ghi",
2346 }),
2347 )
2348 .await;
2349
2350 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2351
2352 let buffer1 = project
2353 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2354 .await
2355 .unwrap();
2356 let events = Rc::new(RefCell::new(Vec::new()));
2357
    // Initially, the buffer isn't dirty.
2359 buffer1.update(cx, |buffer, cx| {
2360 cx.subscribe(&buffer1, {
2361 let events = events.clone();
2362 move |_, _, event, _| match event {
                language::Event::Operation(_) => {}
2364 _ => events.borrow_mut().push(event.clone()),
2365 }
2366 })
2367 .detach();
2368
2369 assert!(!buffer.is_dirty());
2370 assert!(events.borrow().is_empty());
2371
2372 buffer.edit([(1..2, "")], cx);
2373 });
2374
    // After the first edit, the buffer is dirty and emits Edited and DirtyChanged events.
2376 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2378 assert!(buffer.is_dirty());
2379 assert_eq!(
2380 *events.borrow(),
2381 &[language::Event::Edited, language::Event::DirtyChanged]
2382 );
2383 events.borrow_mut().clear();
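        // Simulate the buffer being saved, which marks it as clean again.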
2384 buffer.did_save(
2385 buffer.version(),
2386 buffer.as_rope().fingerprint(),
2387 buffer.file().unwrap().mtime(),
2388 None,
2389 cx,
2390 );
2391 });
2392
    // After saving, the buffer is no longer dirty and emits a Saved event.
2394 buffer1.update(cx, |buffer, cx| {
2395 assert!(!buffer.is_dirty());
2396 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2397 events.borrow_mut().clear();
2398
2399 buffer.edit([(1..1, "B")], cx);
2400 buffer.edit([(2..2, "D")], cx);
2401 });
2402
    // After editing again, the buffer is dirty and emits Edited and DirtyChanged events.
2404 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2406 assert!(buffer.is_dirty());
2407 assert_eq!(
2408 *events.borrow(),
2409 &[
2410 language::Event::Edited,
2411 language::Event::DirtyChanged,
2412 language::Event::Edited,
2413 ],
2414 );
2415 events.borrow_mut().clear();
2416
2417 // After restoring the buffer to its previously-saved state,
2418 // the buffer is not considered dirty anymore.
2419 buffer.edit([(1..3, "")], cx);
        assert_eq!(buffer.text(), "ac");
2421 assert!(!buffer.is_dirty());
2422 });
2423
2424 assert_eq!(
2425 *events.borrow(),
2426 &[language::Event::Edited, language::Event::DirtyChanged]
2427 );
2428
2429 // When a file is deleted, the buffer is considered dirty.
2430 let events = Rc::new(RefCell::new(Vec::new()));
2431 let buffer2 = project
2432 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2433 .await
2434 .unwrap();
2435 buffer2.update(cx, |_, cx| {
2436 cx.subscribe(&buffer2, {
2437 let events = events.clone();
2438 move |_, _, event, _| events.borrow_mut().push(event.clone())
2439 })
2440 .detach();
2441 });
2442
2443 fs.remove_file("/dir/file2".as_ref(), Default::default())
2444 .await
2445 .unwrap();
2446 cx.foreground().run_until_parked();
2447 assert_eq!(
2448 *events.borrow(),
2449 &[
2450 language::Event::DirtyChanged,
2451 language::Event::FileHandleChanged
2452 ]
2453 );
2454
    // When a buffer is already dirty at the time its file is deleted, no additional DirtyChanged event is emitted.
2456 let events = Rc::new(RefCell::new(Vec::new()));
2457 let buffer3 = project
2458 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2459 .await
2460 .unwrap();
2461 buffer3.update(cx, |_, cx| {
2462 cx.subscribe(&buffer3, {
2463 let events = events.clone();
2464 move |_, _, event, _| events.borrow_mut().push(event.clone())
2465 })
2466 .detach();
2467 });
2468
2469 buffer3.update(cx, |buffer, cx| {
2470 buffer.edit([(0..0, "x")], cx);
2471 });
2472 events.borrow_mut().clear();
2473 fs.remove_file("/dir/file3".as_ref(), Default::default())
2474 .await
2475 .unwrap();
2476 cx.foreground().run_until_parked();
2477 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2478 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2479}
2480
2481#[gpui::test]
2482async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2483 let initial_contents = "aaa\nbbbbb\nc\n";
2484 let fs = FakeFs::new(cx.background());
2485 fs.insert_tree(
2486 "/dir",
2487 json!({
2488 "the-file": initial_contents,
2489 }),
2490 )
2491 .await;
2492 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2493 let buffer = project
2494 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2495 .await
2496 .unwrap();
2497
    // Create an anchor on each of the first three lines, so we can check how
    // anchors are relocated when the file is reloaded from disk.
    let anchors = (0..3)
2499 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2500 .collect::<Vec<_>>();
2501
2502 // Change the file on disk, adding two new lines of text, and removing
2503 // one line.
2504 buffer.read_with(cx, |buffer, _| {
2505 assert!(!buffer.is_dirty());
2506 assert!(!buffer.has_conflict());
2507 });
2508 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2509 fs.save(
2510 "/dir/the-file".as_ref(),
2511 &new_contents.into(),
2512 LineEnding::Unix,
2513 )
2514 .await
2515 .unwrap();
2516
2517 // Because the buffer was not modified, it is reloaded from disk. Its
2518 // contents are edited according to the diff between the old and new
2519 // file contents.
2520 cx.foreground().run_until_parked();
2521 buffer.update(cx, |buffer, _| {
2522 assert_eq!(buffer.text(), new_contents);
2523 assert!(!buffer.is_dirty());
2524 assert!(!buffer.has_conflict());
2525
2526 let anchor_positions = anchors
2527 .iter()
2528 .map(|anchor| anchor.to_point(&*buffer))
2529 .collect::<Vec<_>>();
2530 assert_eq!(
2531 anchor_positions,
2532 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
2533 );
2534 });
2535
    // Modify the buffer, making it dirty.
2537 buffer.update(cx, |buffer, cx| {
2538 buffer.edit([(0..0, " ")], cx);
2539 assert!(buffer.is_dirty());
2540 assert!(!buffer.has_conflict());
2541 });
2542
2543 // Change the file on disk again, adding blank lines to the beginning.
2544 fs.save(
2545 "/dir/the-file".as_ref(),
2546 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2547 LineEnding::Unix,
2548 )
2549 .await
2550 .unwrap();
2551
2552 // Because the buffer is modified, it doesn't reload from disk, but is
2553 // marked as having a conflict.
2554 cx.foreground().run_until_parked();
2555 buffer.read_with(cx, |buffer, _| {
2556 assert!(buffer.has_conflict());
2557 });
2558}
2559
2560#[gpui::test]
2561async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2562 let fs = FakeFs::new(cx.background());
2563 fs.insert_tree(
2564 "/dir",
2565 json!({
2566 "file1": "a\nb\nc\n",
2567 "file2": "one\r\ntwo\r\nthree\r\n",
2568 }),
2569 )
2570 .await;
2571
2572 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2573 let buffer1 = project
2574 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2575 .await
2576 .unwrap();
2577 let buffer2 = project
2578 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2579 .await
2580 .unwrap();
2581
    // Both buffers expose their text with `\n` line endings, but each remembers
    // the line ending style of its underlying file.
    buffer1.read_with(cx, |buffer, _| {
2583 assert_eq!(buffer.text(), "a\nb\nc\n");
2584 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2585 });
2586 buffer2.read_with(cx, |buffer, _| {
2587 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2588 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2589 });
2590
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2593 fs.save(
2594 "/dir/file1".as_ref(),
2595 &"aaa\nb\nc\n".into(),
2596 LineEnding::Windows,
2597 )
2598 .await
2599 .unwrap();
2600 cx.foreground().run_until_parked();
2601 buffer1.read_with(cx, |buffer, _| {
2602 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2603 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2604 });
2605
    // Save a buffer that uses Windows line endings. The file on disk is written with CRLF line endings.
2607 buffer2
2608 .update(cx, |buffer, cx| {
2609 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2610 buffer.save(cx)
2611 })
2612 .await
2613 .unwrap();
2614 assert_eq!(
2615 fs.load("/dir/file2".as_ref()).await.unwrap(),
2616 "one\r\ntwo\r\nthree\r\nfour\r\n",
2617 );
2618}
2619
2620#[gpui::test]
2621async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2622 cx.foreground().forbid_parking();
2623
2624 let fs = FakeFs::new(cx.background());
2625 fs.insert_tree(
2626 "/the-dir",
2627 json!({
2628 "a.rs": "
2629 fn foo(mut v: Vec<usize>) {
2630 for x in &v {
2631 v.push(1);
2632 }
2633 }
2634 "
2635 .unindent(),
2636 }),
2637 )
2638 .await;
2639
2640 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2641 let buffer = project
2642 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2643 .await
2644 .unwrap();
2645
2646 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
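    // Build a diagnostics message in which each hint refers back to its primary
    // diagnostic via related information, so that the diagnostics can be grouped.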
2647 let message = lsp::PublishDiagnosticsParams {
2648 uri: buffer_uri.clone(),
2649 diagnostics: vec![
2650 lsp::Diagnostic {
2651 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2652 severity: Some(DiagnosticSeverity::WARNING),
2653 message: "error 1".to_string(),
2654 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2655 location: lsp::Location {
2656 uri: buffer_uri.clone(),
2657 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2658 },
2659 message: "error 1 hint 1".to_string(),
2660 }]),
2661 ..Default::default()
2662 },
2663 lsp::Diagnostic {
2664 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2665 severity: Some(DiagnosticSeverity::HINT),
2666 message: "error 1 hint 1".to_string(),
2667 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2668 location: lsp::Location {
2669 uri: buffer_uri.clone(),
2670 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2671 },
2672 message: "original diagnostic".to_string(),
2673 }]),
2674 ..Default::default()
2675 },
2676 lsp::Diagnostic {
2677 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2678 severity: Some(DiagnosticSeverity::ERROR),
2679 message: "error 2".to_string(),
2680 related_information: Some(vec![
2681 lsp::DiagnosticRelatedInformation {
2682 location: lsp::Location {
2683 uri: buffer_uri.clone(),
2684 range: lsp::Range::new(
2685 lsp::Position::new(1, 13),
2686 lsp::Position::new(1, 15),
2687 ),
2688 },
2689 message: "error 2 hint 1".to_string(),
2690 },
2691 lsp::DiagnosticRelatedInformation {
2692 location: lsp::Location {
2693 uri: buffer_uri.clone(),
2694 range: lsp::Range::new(
2695 lsp::Position::new(1, 13),
2696 lsp::Position::new(1, 15),
2697 ),
2698 },
2699 message: "error 2 hint 2".to_string(),
2700 },
2701 ]),
2702 ..Default::default()
2703 },
2704 lsp::Diagnostic {
2705 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2706 severity: Some(DiagnosticSeverity::HINT),
2707 message: "error 2 hint 1".to_string(),
2708 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2709 location: lsp::Location {
2710 uri: buffer_uri.clone(),
2711 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2712 },
2713 message: "original diagnostic".to_string(),
2714 }]),
2715 ..Default::default()
2716 },
2717 lsp::Diagnostic {
2718 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2719 severity: Some(DiagnosticSeverity::HINT),
2720 message: "error 2 hint 2".to_string(),
2721 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2722 location: lsp::Location {
2723 uri: buffer_uri.clone(),
2724 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2725 },
2726 message: "original diagnostic".to_string(),
2727 }]),
2728 ..Default::default()
2729 },
2730 ],
2731 version: None,
2732 };
2733
2734 project
2735 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2736 .unwrap();
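    // Take a snapshot of the buffer and verify how the diagnostics were grouped.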
2737 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2738
2739 assert_eq!(
2740 buffer
2741 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2742 .collect::<Vec<_>>(),
2743 &[
2744 DiagnosticEntry {
2745 range: Point::new(1, 8)..Point::new(1, 9),
2746 diagnostic: Diagnostic {
2747 severity: DiagnosticSeverity::WARNING,
2748 message: "error 1".to_string(),
2749 group_id: 0,
2750 is_primary: true,
2751 ..Default::default()
2752 }
2753 },
2754 DiagnosticEntry {
2755 range: Point::new(1, 8)..Point::new(1, 9),
2756 diagnostic: Diagnostic {
2757 severity: DiagnosticSeverity::HINT,
2758 message: "error 1 hint 1".to_string(),
2759 group_id: 0,
2760 is_primary: false,
2761 ..Default::default()
2762 }
2763 },
2764 DiagnosticEntry {
2765 range: Point::new(1, 13)..Point::new(1, 15),
2766 diagnostic: Diagnostic {
2767 severity: DiagnosticSeverity::HINT,
2768 message: "error 2 hint 1".to_string(),
2769 group_id: 1,
2770 is_primary: false,
2771 ..Default::default()
2772 }
2773 },
2774 DiagnosticEntry {
2775 range: Point::new(1, 13)..Point::new(1, 15),
2776 diagnostic: Diagnostic {
2777 severity: DiagnosticSeverity::HINT,
2778 message: "error 2 hint 2".to_string(),
2779 group_id: 1,
2780 is_primary: false,
2781 ..Default::default()
2782 }
2783 },
2784 DiagnosticEntry {
2785 range: Point::new(2, 8)..Point::new(2, 17),
2786 diagnostic: Diagnostic {
2787 severity: DiagnosticSeverity::ERROR,
2788 message: "error 2".to_string(),
2789 group_id: 1,
2790 is_primary: true,
2791 ..Default::default()
2792 }
2793 }
2794 ]
2795 );
2796
2797 assert_eq!(
2798 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2799 &[
2800 DiagnosticEntry {
2801 range: Point::new(1, 8)..Point::new(1, 9),
2802 diagnostic: Diagnostic {
2803 severity: DiagnosticSeverity::WARNING,
2804 message: "error 1".to_string(),
2805 group_id: 0,
2806 is_primary: true,
2807 ..Default::default()
2808 }
2809 },
2810 DiagnosticEntry {
2811 range: Point::new(1, 8)..Point::new(1, 9),
2812 diagnostic: Diagnostic {
2813 severity: DiagnosticSeverity::HINT,
2814 message: "error 1 hint 1".to_string(),
2815 group_id: 0,
2816 is_primary: false,
2817 ..Default::default()
2818 }
2819 },
2820 ]
2821 );
2822 assert_eq!(
2823 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2824 &[
2825 DiagnosticEntry {
2826 range: Point::new(1, 13)..Point::new(1, 15),
2827 diagnostic: Diagnostic {
2828 severity: DiagnosticSeverity::HINT,
2829 message: "error 2 hint 1".to_string(),
2830 group_id: 1,
2831 is_primary: false,
2832 ..Default::default()
2833 }
2834 },
2835 DiagnosticEntry {
2836 range: Point::new(1, 13)..Point::new(1, 15),
2837 diagnostic: Diagnostic {
2838 severity: DiagnosticSeverity::HINT,
2839 message: "error 2 hint 2".to_string(),
2840 group_id: 1,
2841 is_primary: false,
2842 ..Default::default()
2843 }
2844 },
2845 DiagnosticEntry {
2846 range: Point::new(2, 8)..Point::new(2, 17),
2847 diagnostic: Diagnostic {
2848 severity: DiagnosticSeverity::ERROR,
2849 message: "error 2".to_string(),
2850 group_id: 1,
2851 is_primary: true,
2852 ..Default::default()
2853 }
2854 }
2855 ]
2856 );
2857}
2858
2859#[gpui::test]
2860async fn test_rename(cx: &mut gpui::TestAppContext) {
2861 cx.foreground().forbid_parking();
2862
2863 let mut language = Language::new(
2864 LanguageConfig {
2865 name: "Rust".into(),
2866 path_suffixes: vec!["rs".to_string()],
2867 ..Default::default()
2868 },
2869 Some(tree_sitter_rust::language()),
2870 );
2871 let mut fake_servers = language
2872 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2873 capabilities: lsp::ServerCapabilities {
2874 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2875 prepare_provider: Some(true),
2876 work_done_progress_options: Default::default(),
2877 })),
2878 ..Default::default()
2879 },
2880 ..Default::default()
2881 }))
2882 .await;
2883
2884 let fs = FakeFs::new(cx.background());
2885 fs.insert_tree(
2886 "/dir",
2887 json!({
2888 "one.rs": "const ONE: usize = 1;",
2889 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2890 }),
2891 )
2892 .await;
2893
2894 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2895 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2896 let buffer = project
2897 .update(cx, |project, cx| {
2898 project.open_local_buffer("/dir/one.rs", cx)
2899 })
2900 .await
2901 .unwrap();
2902
2903 let fake_server = fake_servers.next().await.unwrap();
2904
2905 let response = project.update(cx, |project, cx| {
2906 project.prepare_rename(buffer.clone(), 7, cx)
2907 });
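    // The language server responds with the range of the symbol that can be renamed.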
2908 fake_server
2909 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2910 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2911 assert_eq!(params.position, lsp::Position::new(0, 7));
2912 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2913 lsp::Position::new(0, 6),
2914 lsp::Position::new(0, 9),
2915 ))))
2916 })
2917 .next()
2918 .await
2919 .unwrap();
2920 let range = response.await.unwrap().unwrap();
2921 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2922 assert_eq!(range, 6..9);
2923
2924 let response = project.update(cx, |project, cx| {
2925 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2926 });
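    // The language server responds with a workspace edit that renames the symbol in both files.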
2927 fake_server
2928 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2929 assert_eq!(
2930 params.text_document_position.text_document.uri.as_str(),
2931 "file:///dir/one.rs"
2932 );
2933 assert_eq!(
2934 params.text_document_position.position,
2935 lsp::Position::new(0, 7)
2936 );
2937 assert_eq!(params.new_name, "THREE");
2938 Ok(Some(lsp::WorkspaceEdit {
2939 changes: Some(
2940 [
2941 (
2942 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2943 vec![lsp::TextEdit::new(
2944 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2945 "THREE".to_string(),
2946 )],
2947 ),
2948 (
2949 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2950 vec![
2951 lsp::TextEdit::new(
2952 lsp::Range::new(
2953 lsp::Position::new(0, 24),
2954 lsp::Position::new(0, 27),
2955 ),
2956 "THREE".to_string(),
2957 ),
2958 lsp::TextEdit::new(
2959 lsp::Range::new(
2960 lsp::Position::new(0, 35),
2961 lsp::Position::new(0, 38),
2962 ),
2963 "THREE".to_string(),
2964 ),
2965 ],
2966 ),
2967 ]
2968 .into_iter()
2969 .collect(),
2970 ),
2971 ..Default::default()
2972 }))
2973 })
2974 .next()
2975 .await
2976 .unwrap();
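    // The rename produces a project transaction containing an edited buffer for each affected file.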
2977 let mut transaction = response.await.unwrap().0;
2978 assert_eq!(transaction.len(), 2);
2979 assert_eq!(
2980 transaction
2981 .remove_entry(&buffer)
2982 .unwrap()
2983 .0
2984 .read_with(cx, |buffer, _| buffer.text()),
2985 "const THREE: usize = 1;"
2986 );
2987 assert_eq!(
2988 transaction
2989 .into_keys()
2990 .next()
2991 .unwrap()
2992 .read_with(cx, |buffer, _| buffer.text()),
2993 "const TWO: usize = one::THREE + one::THREE;"
2994 );
2995}
2996
2997#[gpui::test]
2998async fn test_search(cx: &mut gpui::TestAppContext) {
2999 let fs = FakeFs::new(cx.background());
3000 fs.insert_tree(
3001 "/dir",
3002 json!({
3003 "one.rs": "const ONE: usize = 1;",
3004 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3005 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3006 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3007 }),
3008 )
3009 .await;
3010 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
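    // Search the project before opening any buffers; matches come from the files on disk.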
3011 assert_eq!(
3012 search(&project, SearchQuery::text("TWO", false, true), cx)
3013 .await
3014 .unwrap(),
3015 HashMap::from_iter([
3016 ("two.rs".to_string(), vec![6..9]),
3017 ("three.rs".to_string(), vec![37..40])
3018 ])
3019 );
3020
3021 let buffer_4 = project
3022 .update(cx, |project, cx| {
3023 project.open_local_buffer("/dir/four.rs", cx)
3024 })
3025 .await
3026 .unwrap();
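    // Edit an open buffer so that it contains new matches; subsequent searches reflect the unsaved contents.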
3027 buffer_4.update(cx, |buffer, cx| {
3028 let text = "two::TWO";
3029 buffer.edit([(20..28, text), (31..43, text)], cx);
3030 });
3031
3032 assert_eq!(
3033 search(&project, SearchQuery::text("TWO", false, true), cx)
3034 .await
3035 .unwrap(),
3036 HashMap::from_iter([
3037 ("two.rs".to_string(), vec![6..9]),
3038 ("three.rs".to_string(), vec![37..40]),
3039 ("four.rs".to_string(), vec![25..28, 36..39])
3040 ])
3041 );
3042
    // Runs a project-wide search and collects the results as a map from file
    // path to the byte ranges of the matches within that file's buffer.
    async fn search(
3044 project: &ModelHandle<Project>,
3045 query: SearchQuery,
3046 cx: &mut gpui::TestAppContext,
3047 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3048 let results = project
3049 .update(cx, |project, cx| project.search(query, cx))
3050 .await?;
3051
3052 Ok(results
3053 .into_iter()
3054 .map(|(buffer, ranges)| {
3055 buffer.read_with(cx, |buffer, _| {
3056 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3057 let ranges = ranges
3058 .into_iter()
3059 .map(|range| range.to_offset(buffer))
3060 .collect::<Vec<_>>();
3061 (path, ranges)
3062 })
3063 })
3064 .collect())
3065 }
3066}