use crate::{worktree::WorktreeHandle, Event, *};
use fs::RealFs;
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe};
use language::{
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use serde_json::json;
use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

#[gpui::test]
async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });

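    // Fuzzy-match the worktree's paths against the query "bna"; only the
    // nested "banana/carrot" entries should match.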
    let cancel_flag = Default::default();
    let results = project
        .read_with(cx, |project, cx| {
            project.match_paths("bna", false, false, 10, &cancel_flag, cx)
        })
        .await;
    assert_eq!(
        results
            .into_iter()
            .map(|result| result.path)
            .collect::<Vec<Arc<Path>>>(),
        vec![
            PathBuf::from("banana/carrot/date").into(),
            PathBuf::from("banana/carrot/endive").into(),
        ]
    );
}

#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
        name: "the-rust-language-server",
        capabilities: lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        },
        ..Default::default()
    });
    let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
        name: "the-json-language-server",
        capabilities: lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![":".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        },
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust_language));
        project.languages.add(Arc::new(json_language));
    });

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    toml_buffer
        .update(cx, |buffer, cx| buffer.save(cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

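    // Seed the buffer with a diagnostic so we can verify below that diagnostics
    // are cleared when the file's language changes.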
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

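    // Publish an LSP diagnostic for each single-file worktree.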
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

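    // Diagnostics in the hidden (non-visible) worktree are excluded from the
    // project-wide summaries.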
    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_progress_token: Some(progress_token),
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

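    // Subscribe to project events so we can observe the disk-based diagnostics
    // progress notifications below.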
    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        disk_based_diagnostics_progress_token: Some(progress_token),
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [0; 0]
        );
    });
}

#[gpui::test]
async fn test_toggling_enable_language_server(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    deterministic.forbid_parking();

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
        name: "rust-lsp",
        ..Default::default()
    });
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
        name: "js-lsp",
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(true),
                    ..Default::default()
                },
            );
            settings.language_overrides.insert(
                Arc::from("JavaScript"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle diagnostics that are reported out of order (here, the entry for
    // line 1 precedes the one for line 0).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

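    // Publish two diagnostics with empty ranges: one in the middle of a line and
    // one at the end of a line.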
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

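/// Collects the chunks of `buffer` over `range` along with their diagnostic
/// severity, merging adjacent chunks that share the same severity.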
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}

#[gpui::test]
async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "dir1": {},
            "dir2": {
                "dir3": {}
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let cancel_flag = Default::default();
    let results = project
        .read_with(cx, |project, cx| {
            project.match_paths("dir", false, false, 10, &cancel_flag, cx)
        })
        .await;

    assert!(results.is_empty());
}

#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

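        // Dropping the definition releases the invisible worktree that was
        // created for its target buffer.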
        drop(definition);
    });
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
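    // Since the completion has no edit range, the word under the cursor ("fqn")
    // is used as the range to replace.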
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}

#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
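    // Carriage returns in the completion text are normalized to newlines.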
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}

#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2021
2022#[gpui::test]
2023async fn test_save_file(cx: &mut gpui::TestAppContext) {
2024 let fs = FakeFs::new(cx.background());
2025 fs.insert_tree(
2026 "/dir",
2027 json!({
2028 "file1": "the old contents",
2029 }),
2030 )
2031 .await;
2032
2033 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2034 let buffer = project
2035 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2036 .await
2037 .unwrap();
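    // Edit the buffer, then save it. The file on disk is updated to match the buffer's contents.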
2038 buffer
2039 .update(cx, |buffer, cx| {
2040 assert_eq!(buffer.text(), "the old contents");
2041 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2042 buffer.save(cx)
2043 })
2044 .await
2045 .unwrap();
2046
2047 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2048 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2049}
2050
2051#[gpui::test]
2052async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2053 let fs = FakeFs::new(cx.background());
2054 fs.insert_tree(
2055 "/dir",
2056 json!({
2057 "file1": "the old contents",
2058 }),
2059 )
2060 .await;
2061
2062 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2063 let buffer = project
2064 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2065 .await
2066 .unwrap();
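    // Edit and save the buffer. The file backing the single-file worktree is updated on disk.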
2067 buffer
2068 .update(cx, |buffer, cx| {
2069 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2070 buffer.save(cx)
2071 })
2072 .await
2073 .unwrap();
2074
2075 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2076 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2077}
2078
2079#[gpui::test]
2080async fn test_save_as(cx: &mut gpui::TestAppContext) {
2081 let fs = FakeFs::new(cx.background());
2082 fs.insert_tree("/dir", json!({})).await;
2083
2084 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
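    // Create an untitled buffer that isn't yet associated with any file.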
2085 let buffer = project.update(cx, |project, cx| {
2086 project.create_buffer("", None, cx).unwrap()
2087 });
2088 buffer.update(cx, |buffer, cx| {
2089 buffer.edit([(0..0, "abc")], cx);
2090 assert!(buffer.is_dirty());
2091 assert!(!buffer.has_conflict());
2092 });
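    // Save the buffer under a new path inside the worktree. It becomes clean and is associated with that file.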
2093 project
2094 .update(cx, |project, cx| {
2095 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2096 })
2097 .await
2098 .unwrap();
2099 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2100 buffer.read_with(cx, |buffer, cx| {
2101 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2102 assert!(!buffer.is_dirty());
2103 assert!(!buffer.has_conflict());
2104 });
2105
2106 let opened_buffer = project
2107 .update(cx, |project, cx| {
2108 project.open_local_buffer("/dir/file1", cx)
2109 })
2110 .await
2111 .unwrap();
2112 assert_eq!(opened_buffer, buffer);
2113}
2114
2115#[gpui::test(retries = 5)]
2116async fn test_rescan_and_remote_updates(
2117 deterministic: Arc<Deterministic>,
2118 cx: &mut gpui::TestAppContext,
2119) {
2120 let dir = temp_tree(json!({
2121 "a": {
2122 "file1": "",
2123 "file2": "",
2124 "file3": "",
2125 },
2126 "b": {
2127 "c": {
2128 "file4": "",
2129 "file5": "",
2130 }
2131 }
2132 }));
2133
2134 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2135 let rpc = project.read_with(cx, |p, _| p.client.clone());
2136
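    // Helpers for opening a buffer at a relative path and for looking up a worktree entry's id.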
2137 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2138 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2139 async move { buffer.await.unwrap() }
2140 };
2141 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2142 project.read_with(cx, |project, cx| {
2143 let tree = project.worktrees(cx).next().unwrap();
2144 tree.read(cx)
2145 .entry_for_path(path)
2146 .expect(&format!("no entry for path {}", path))
2147 .id
2148 })
2149 };
2150
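    // Open buffers for files that will later be renamed or deleted on disk.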
2151 let buffer2 = buffer_for_path("a/file2", cx).await;
2152 let buffer3 = buffer_for_path("a/file3", cx).await;
2153 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2154 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2155
2156 let file2_id = id_for_path("a/file2", &cx);
2157 let file3_id = id_for_path("a/file3", &cx);
2158 let file4_id = id_for_path("b/c/file4", &cx);
2159
2160 // Create a remote copy of this worktree.
2161 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2162 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2163 let remote = cx.update(|cx| {
2164 Worktree::remote(
2165 1,
2166 1,
2167 proto::WorktreeMetadata {
2168 id: initial_snapshot.id().to_proto(),
2169 root_name: initial_snapshot.root_name().into(),
2170 visible: true,
2171 },
2172 rpc.clone(),
2173 cx,
2174 )
2175 });
2176 remote.update(cx, |remote, _| {
2177 let update = initial_snapshot.build_initial_update(1);
2178 remote.as_remote_mut().unwrap().update_from_remote(update);
2179 });
2180 deterministic.run_until_parked();
2181
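    // None of the buffers are dirty after the initial scan.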
2182 cx.read(|cx| {
2183 assert!(!buffer2.read(cx).is_dirty());
2184 assert!(!buffer3.read(cx).is_dirty());
2185 assert!(!buffer4.read(cx).is_dirty());
2186 assert!(!buffer5.read(cx).is_dirty());
2187 });
2188
2189 // Rename and delete files and directories.
2190 tree.flush_fs_events(&cx).await;
2191 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2192 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2193 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2194 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2195 tree.flush_fs_events(&cx).await;
2196
2197 let expected_paths = vec![
2198 "a",
2199 "a/file1",
2200 "a/file2.new",
2201 "b",
2202 "d",
2203 "d/file3",
2204 "d/file4",
2205 ];
2206
2207 cx.read(|app| {
2208 assert_eq!(
2209 tree.read(app)
2210 .paths()
2211 .map(|p| p.to_str().unwrap())
2212 .collect::<Vec<_>>(),
2213 expected_paths
2214 );
2215
2216 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
2217 assert_eq!(id_for_path("d/file3", &cx), file3_id);
2218 assert_eq!(id_for_path("d/file4", &cx), file4_id);
2219
2220 assert_eq!(
2221 buffer2.read(app).file().unwrap().path().as_ref(),
2222 Path::new("a/file2.new")
2223 );
2224 assert_eq!(
2225 buffer3.read(app).file().unwrap().path().as_ref(),
2226 Path::new("d/file3")
2227 );
2228 assert_eq!(
2229 buffer4.read(app).file().unwrap().path().as_ref(),
2230 Path::new("d/file4")
2231 );
2232 assert_eq!(
2233 buffer5.read(app).file().unwrap().path().as_ref(),
2234 Path::new("b/c/file5")
2235 );
2236
2237 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2238 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2239 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2240 assert!(buffer5.read(app).file().unwrap().is_deleted());
2241 });
2242
2243 // Update the remote worktree. Check that it becomes consistent with the
2244 // local worktree.
2245 remote.update(cx, |remote, cx| {
2246 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2247 &initial_snapshot,
2248 1,
2249 1,
2250 true,
2251 );
2252 remote.as_remote_mut().unwrap().update_from_remote(update);
2253 });
2254 deterministic.run_until_parked();
2255 remote.read_with(cx, |remote, _| {
2256 assert_eq!(
2257 remote
2258 .paths()
2259 .map(|p| p.to_str().unwrap())
2260 .collect::<Vec<_>>(),
2261 expected_paths
2262 );
2263 });
2264}
2265
2266#[gpui::test]
2267async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2268 let fs = FakeFs::new(cx.background());
2269 fs.insert_tree(
2270 "/dir",
2271 json!({
2272 "a.txt": "a-contents",
2273 "b.txt": "b-contents",
2274 }),
2275 )
2276 .await;
2277
2278 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2279
2280 // Spawn multiple tasks to open paths, repeating some paths.
2281 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2282 (
2283 p.open_local_buffer("/dir/a.txt", cx),
2284 p.open_local_buffer("/dir/b.txt", cx),
2285 p.open_local_buffer("/dir/a.txt", cx),
2286 )
2287 });
2288
2289 let buffer_a_1 = buffer_a_1.await.unwrap();
2290 let buffer_a_2 = buffer_a_2.await.unwrap();
2291 let buffer_b = buffer_b.await.unwrap();
2292 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2293 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2294
2295 // There is only one buffer per path.
2296 let buffer_a_id = buffer_a_1.id();
2297 assert_eq!(buffer_a_2.id(), buffer_a_id);
2298
2299 // Open the same path again while it is still open.
2300 drop(buffer_a_1);
2301 let buffer_a_3 = project
2302 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2303 .await
2304 .unwrap();
2305
2306 // There's still only one buffer per path.
2307 assert_eq!(buffer_a_3.id(), buffer_a_id);
2308}
2309
2310#[gpui::test]
2311async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2312 let fs = FakeFs::new(cx.background());
2313 fs.insert_tree(
2314 "/dir",
2315 json!({
2316 "file1": "abc",
2317 "file2": "def",
2318 "file3": "ghi",
2319 }),
2320 )
2321 .await;
2322
2323 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2324
2325 let buffer1 = project
2326 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2327 .await
2328 .unwrap();
2329 let events = Rc::new(RefCell::new(Vec::new()));
2330
    // Initially, the buffer isn't dirty.
2332 buffer1.update(cx, |buffer, cx| {
2333 cx.subscribe(&buffer1, {
2334 let events = events.clone();
2335 move |_, _, event, _| match event {
2336 BufferEvent::Operation(_) => {}
2337 _ => events.borrow_mut().push(event.clone()),
2338 }
2339 })
2340 .detach();
2341
2342 assert!(!buffer.is_dirty());
2343 assert!(events.borrow().is_empty());
2344
2345 buffer.edit([(1..2, "")], cx);
2346 });
2347
    // After the first edit, the buffer is dirty and emits a `DirtyChanged` event.
2349 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2351 assert!(buffer.is_dirty());
2352 assert_eq!(
2353 *events.borrow(),
2354 &[language::Event::Edited, language::Event::DirtyChanged]
2355 );
2356 events.borrow_mut().clear();
2357 buffer.did_save(
2358 buffer.version(),
2359 buffer.as_rope().fingerprint(),
2360 buffer.file().unwrap().mtime(),
2361 None,
2362 cx,
2363 );
2364 });
2365
    // After saving, the buffer is no longer dirty and emits a `Saved` event.
2367 buffer1.update(cx, |buffer, cx| {
2368 assert!(!buffer.is_dirty());
2369 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2370 events.borrow_mut().clear();
2371
2372 buffer.edit([(1..1, "B")], cx);
2373 buffer.edit([(2..2, "D")], cx);
2374 });
2375
    // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
2377 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2379 assert!(buffer.is_dirty());
2380 assert_eq!(
2381 *events.borrow(),
2382 &[
2383 language::Event::Edited,
2384 language::Event::DirtyChanged,
2385 language::Event::Edited,
2386 ],
2387 );
2388 events.borrow_mut().clear();
2389
2390 // After restoring the buffer to its previously-saved state,
2391 // the buffer is not considered dirty anymore.
2392 buffer.edit([(1..3, "")], cx);
        assert_eq!(buffer.text(), "ac");
2394 assert!(!buffer.is_dirty());
2395 });
2396
2397 assert_eq!(
2398 *events.borrow(),
2399 &[language::Event::Edited, language::Event::DirtyChanged]
2400 );
2401
2402 // When a file is deleted, the buffer is considered dirty.
2403 let events = Rc::new(RefCell::new(Vec::new()));
2404 let buffer2 = project
2405 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2406 .await
2407 .unwrap();
2408 buffer2.update(cx, |_, cx| {
2409 cx.subscribe(&buffer2, {
2410 let events = events.clone();
2411 move |_, _, event, _| events.borrow_mut().push(event.clone())
2412 })
2413 .detach();
2414 });
2415
2416 fs.remove_file("/dir/file2".as_ref(), Default::default())
2417 .await
2418 .unwrap();
2419 cx.foreground().run_until_parked();
2420 assert_eq!(
2421 *events.borrow(),
2422 &[
2423 language::Event::DirtyChanged,
2424 language::Event::FileHandleChanged
2425 ]
2426 );
2427
    // When a file that is already dirty is deleted, no `DirtyChanged` event is emitted.
2429 let events = Rc::new(RefCell::new(Vec::new()));
2430 let buffer3 = project
2431 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2432 .await
2433 .unwrap();
2434 buffer3.update(cx, |_, cx| {
2435 cx.subscribe(&buffer3, {
2436 let events = events.clone();
2437 move |_, _, event, _| events.borrow_mut().push(event.clone())
2438 })
2439 .detach();
2440 });
2441
2442 buffer3.update(cx, |buffer, cx| {
2443 buffer.edit([(0..0, "x")], cx);
2444 });
2445 events.borrow_mut().clear();
2446 fs.remove_file("/dir/file3".as_ref(), Default::default())
2447 .await
2448 .unwrap();
2449 cx.foreground().run_until_parked();
2450 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2451 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2452}
2453
2454#[gpui::test]
2455async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2456 let initial_contents = "aaa\nbbbbb\nc\n";
2457 let fs = FakeFs::new(cx.background());
2458 fs.insert_tree(
2459 "/dir",
2460 json!({
2461 "the-file": initial_contents,
2462 }),
2463 )
2464 .await;
2465 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2466 let buffer = project
2467 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2468 .await
2469 .unwrap();
2470
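    // Create an anchor one column into each of the three original lines, so their
    // positions can be verified after the file is reloaded.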
2471 let anchors = (0..3)
2472 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2473 .collect::<Vec<_>>();
2474
2475 // Change the file on disk, adding two new lines of text, and removing
2476 // one line.
2477 buffer.read_with(cx, |buffer, _| {
2478 assert!(!buffer.is_dirty());
2479 assert!(!buffer.has_conflict());
2480 });
2481 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2482 fs.save(
2483 "/dir/the-file".as_ref(),
2484 &new_contents.into(),
2485 LineEnding::Unix,
2486 )
2487 .await
2488 .unwrap();
2489
2490 // Because the buffer was not modified, it is reloaded from disk. Its
2491 // contents are edited according to the diff between the old and new
2492 // file contents.
2493 cx.foreground().run_until_parked();
2494 buffer.update(cx, |buffer, _| {
2495 assert_eq!(buffer.text(), new_contents);
2496 assert!(!buffer.is_dirty());
2497 assert!(!buffer.has_conflict());
2498
2499 let anchor_positions = anchors
2500 .iter()
2501 .map(|anchor| anchor.to_point(&*buffer))
2502 .collect::<Vec<_>>();
2503 assert_eq!(
2504 anchor_positions,
2505 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
2506 );
2507 });
2508
2509 // Modify the buffer
2510 buffer.update(cx, |buffer, cx| {
2511 buffer.edit([(0..0, " ")], cx);
2512 assert!(buffer.is_dirty());
2513 assert!(!buffer.has_conflict());
2514 });
2515
2516 // Change the file on disk again, adding blank lines to the beginning.
2517 fs.save(
2518 "/dir/the-file".as_ref(),
2519 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2520 LineEnding::Unix,
2521 )
2522 .await
2523 .unwrap();
2524
2525 // Because the buffer is modified, it doesn't reload from disk, but is
2526 // marked as having a conflict.
2527 cx.foreground().run_until_parked();
2528 buffer.read_with(cx, |buffer, _| {
2529 assert!(buffer.has_conflict());
2530 });
2531}
2532
2533#[gpui::test]
2534async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2535 let fs = FakeFs::new(cx.background());
2536 fs.insert_tree(
2537 "/dir",
2538 json!({
2539 "file1": "a\nb\nc\n",
2540 "file2": "one\r\ntwo\r\nthree\r\n",
2541 }),
2542 )
2543 .await;
2544
2545 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2546 let buffer1 = project
2547 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2548 .await
2549 .unwrap();
2550 let buffer2 = project
2551 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2552 .await
2553 .unwrap();
2554
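    // Buffers normalize their text to `\n` line endings, but remember each file's original line ending style.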
2555 buffer1.read_with(cx, |buffer, _| {
2556 assert_eq!(buffer.text(), "a\nb\nc\n");
2557 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2558 });
2559 buffer2.read_with(cx, |buffer, _| {
2560 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2561 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2562 });
2563
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2566 fs.save(
2567 "/dir/file1".as_ref(),
2568 &"aaa\nb\nc\n".into(),
2569 LineEnding::Windows,
2570 )
2571 .await
2572 .unwrap();
2573 cx.foreground().run_until_parked();
2574 buffer1.read_with(cx, |buffer, _| {
2575 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2576 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2577 });
2578
    // Save a file with Windows line endings. The file is written correctly.
2580 buffer2
2581 .update(cx, |buffer, cx| {
2582 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2583 buffer.save(cx)
2584 })
2585 .await
2586 .unwrap();
2587 assert_eq!(
2588 fs.load("/dir/file2".as_ref()).await.unwrap(),
2589 "one\r\ntwo\r\nthree\r\nfour\r\n",
2590 );
2591}
2592
2593#[gpui::test]
2594async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2595 cx.foreground().forbid_parking();
2596
2597 let fs = FakeFs::new(cx.background());
2598 fs.insert_tree(
2599 "/the-dir",
2600 json!({
2601 "a.rs": "
2602 fn foo(mut v: Vec<usize>) {
2603 for x in &v {
2604 v.push(1);
2605 }
2606 }
2607 "
2608 .unindent(),
2609 }),
2610 )
2611 .await;
2612
2613 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2614 let buffer = project
2615 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2616 .await
2617 .unwrap();
2618
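    // Publish diagnostics in which hint entries reference their primary diagnostics
    // through related information, so that they can be grouped together.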
2619 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2620 let message = lsp::PublishDiagnosticsParams {
2621 uri: buffer_uri.clone(),
2622 diagnostics: vec![
2623 lsp::Diagnostic {
2624 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2625 severity: Some(DiagnosticSeverity::WARNING),
2626 message: "error 1".to_string(),
2627 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2628 location: lsp::Location {
2629 uri: buffer_uri.clone(),
2630 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2631 },
2632 message: "error 1 hint 1".to_string(),
2633 }]),
2634 ..Default::default()
2635 },
2636 lsp::Diagnostic {
2637 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2638 severity: Some(DiagnosticSeverity::HINT),
2639 message: "error 1 hint 1".to_string(),
2640 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2641 location: lsp::Location {
2642 uri: buffer_uri.clone(),
2643 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2644 },
2645 message: "original diagnostic".to_string(),
2646 }]),
2647 ..Default::default()
2648 },
2649 lsp::Diagnostic {
2650 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2651 severity: Some(DiagnosticSeverity::ERROR),
2652 message: "error 2".to_string(),
2653 related_information: Some(vec![
2654 lsp::DiagnosticRelatedInformation {
2655 location: lsp::Location {
2656 uri: buffer_uri.clone(),
2657 range: lsp::Range::new(
2658 lsp::Position::new(1, 13),
2659 lsp::Position::new(1, 15),
2660 ),
2661 },
2662 message: "error 2 hint 1".to_string(),
2663 },
2664 lsp::DiagnosticRelatedInformation {
2665 location: lsp::Location {
2666 uri: buffer_uri.clone(),
2667 range: lsp::Range::new(
2668 lsp::Position::new(1, 13),
2669 lsp::Position::new(1, 15),
2670 ),
2671 },
2672 message: "error 2 hint 2".to_string(),
2673 },
2674 ]),
2675 ..Default::default()
2676 },
2677 lsp::Diagnostic {
2678 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2679 severity: Some(DiagnosticSeverity::HINT),
2680 message: "error 2 hint 1".to_string(),
2681 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2682 location: lsp::Location {
2683 uri: buffer_uri.clone(),
2684 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2685 },
2686 message: "original diagnostic".to_string(),
2687 }]),
2688 ..Default::default()
2689 },
2690 lsp::Diagnostic {
2691 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2692 severity: Some(DiagnosticSeverity::HINT),
2693 message: "error 2 hint 2".to_string(),
2694 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2695 location: lsp::Location {
2696 uri: buffer_uri.clone(),
2697 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2698 },
2699 message: "original diagnostic".to_string(),
2700 }]),
2701 ..Default::default()
2702 },
2703 ],
2704 version: None,
2705 };
2706
2707 project
2708 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2709 .unwrap();
2710 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2711
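    // Diagnostics in the same group share a group_id, and the original diagnostic in each group is marked as primary.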
2712 assert_eq!(
2713 buffer
2714 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2715 .collect::<Vec<_>>(),
2716 &[
2717 DiagnosticEntry {
2718 range: Point::new(1, 8)..Point::new(1, 9),
2719 diagnostic: Diagnostic {
2720 severity: DiagnosticSeverity::WARNING,
2721 message: "error 1".to_string(),
2722 group_id: 0,
2723 is_primary: true,
2724 ..Default::default()
2725 }
2726 },
2727 DiagnosticEntry {
2728 range: Point::new(1, 8)..Point::new(1, 9),
2729 diagnostic: Diagnostic {
2730 severity: DiagnosticSeverity::HINT,
2731 message: "error 1 hint 1".to_string(),
2732 group_id: 0,
2733 is_primary: false,
2734 ..Default::default()
2735 }
2736 },
2737 DiagnosticEntry {
2738 range: Point::new(1, 13)..Point::new(1, 15),
2739 diagnostic: Diagnostic {
2740 severity: DiagnosticSeverity::HINT,
2741 message: "error 2 hint 1".to_string(),
2742 group_id: 1,
2743 is_primary: false,
2744 ..Default::default()
2745 }
2746 },
2747 DiagnosticEntry {
2748 range: Point::new(1, 13)..Point::new(1, 15),
2749 diagnostic: Diagnostic {
2750 severity: DiagnosticSeverity::HINT,
2751 message: "error 2 hint 2".to_string(),
2752 group_id: 1,
2753 is_primary: false,
2754 ..Default::default()
2755 }
2756 },
2757 DiagnosticEntry {
2758 range: Point::new(2, 8)..Point::new(2, 17),
2759 diagnostic: Diagnostic {
2760 severity: DiagnosticSeverity::ERROR,
2761 message: "error 2".to_string(),
2762 group_id: 1,
2763 is_primary: true,
2764 ..Default::default()
2765 }
2766 }
2767 ]
2768 );
2769
2770 assert_eq!(
2771 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2772 &[
2773 DiagnosticEntry {
2774 range: Point::new(1, 8)..Point::new(1, 9),
2775 diagnostic: Diagnostic {
2776 severity: DiagnosticSeverity::WARNING,
2777 message: "error 1".to_string(),
2778 group_id: 0,
2779 is_primary: true,
2780 ..Default::default()
2781 }
2782 },
2783 DiagnosticEntry {
2784 range: Point::new(1, 8)..Point::new(1, 9),
2785 diagnostic: Diagnostic {
2786 severity: DiagnosticSeverity::HINT,
2787 message: "error 1 hint 1".to_string(),
2788 group_id: 0,
2789 is_primary: false,
2790 ..Default::default()
2791 }
2792 },
2793 ]
2794 );
2795 assert_eq!(
2796 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2797 &[
2798 DiagnosticEntry {
2799 range: Point::new(1, 13)..Point::new(1, 15),
2800 diagnostic: Diagnostic {
2801 severity: DiagnosticSeverity::HINT,
2802 message: "error 2 hint 1".to_string(),
2803 group_id: 1,
2804 is_primary: false,
2805 ..Default::default()
2806 }
2807 },
2808 DiagnosticEntry {
2809 range: Point::new(1, 13)..Point::new(1, 15),
2810 diagnostic: Diagnostic {
2811 severity: DiagnosticSeverity::HINT,
2812 message: "error 2 hint 2".to_string(),
2813 group_id: 1,
2814 is_primary: false,
2815 ..Default::default()
2816 }
2817 },
2818 DiagnosticEntry {
2819 range: Point::new(2, 8)..Point::new(2, 17),
2820 diagnostic: Diagnostic {
2821 severity: DiagnosticSeverity::ERROR,
2822 message: "error 2".to_string(),
2823 group_id: 1,
2824 is_primary: true,
2825 ..Default::default()
2826 }
2827 }
2828 ]
2829 );
2830}
2831
2832#[gpui::test]
2833async fn test_rename(cx: &mut gpui::TestAppContext) {
2834 cx.foreground().forbid_parking();
2835
2836 let mut language = Language::new(
2837 LanguageConfig {
2838 name: "Rust".into(),
2839 path_suffixes: vec!["rs".to_string()],
2840 ..Default::default()
2841 },
2842 Some(tree_sitter_rust::language()),
2843 );
2844 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
2845 capabilities: lsp::ServerCapabilities {
2846 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2847 prepare_provider: Some(true),
2848 work_done_progress_options: Default::default(),
2849 })),
2850 ..Default::default()
2851 },
2852 ..Default::default()
2853 });
2854
2855 let fs = FakeFs::new(cx.background());
2856 fs.insert_tree(
2857 "/dir",
2858 json!({
2859 "one.rs": "const ONE: usize = 1;",
2860 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2861 }),
2862 )
2863 .await;
2864
2865 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2866 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2867 let buffer = project
2868 .update(cx, |project, cx| {
2869 project.open_local_buffer("/dir/one.rs", cx)
2870 })
2871 .await
2872 .unwrap();
2873
2874 let fake_server = fake_servers.next().await.unwrap();
2875
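    // Prepare the rename to determine the range of the symbol at the given offset.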
2876 let response = project.update(cx, |project, cx| {
2877 project.prepare_rename(buffer.clone(), 7, cx)
2878 });
2879 fake_server
2880 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2881 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2882 assert_eq!(params.position, lsp::Position::new(0, 7));
2883 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2884 lsp::Position::new(0, 6),
2885 lsp::Position::new(0, 9),
2886 ))))
2887 })
2888 .next()
2889 .await
2890 .unwrap();
2891 let range = response.await.unwrap().unwrap();
2892 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2893 assert_eq!(range, 6..9);
2894
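    // Perform the rename. The language server responds with edits to both files.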
2895 let response = project.update(cx, |project, cx| {
2896 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2897 });
2898 fake_server
2899 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2900 assert_eq!(
2901 params.text_document_position.text_document.uri.as_str(),
2902 "file:///dir/one.rs"
2903 );
2904 assert_eq!(
2905 params.text_document_position.position,
2906 lsp::Position::new(0, 7)
2907 );
2908 assert_eq!(params.new_name, "THREE");
2909 Ok(Some(lsp::WorkspaceEdit {
2910 changes: Some(
2911 [
2912 (
2913 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2914 vec![lsp::TextEdit::new(
2915 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2916 "THREE".to_string(),
2917 )],
2918 ),
2919 (
2920 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2921 vec![
2922 lsp::TextEdit::new(
2923 lsp::Range::new(
2924 lsp::Position::new(0, 24),
2925 lsp::Position::new(0, 27),
2926 ),
2927 "THREE".to_string(),
2928 ),
2929 lsp::TextEdit::new(
2930 lsp::Range::new(
2931 lsp::Position::new(0, 35),
2932 lsp::Position::new(0, 38),
2933 ),
2934 "THREE".to_string(),
2935 ),
2936 ],
2937 ),
2938 ]
2939 .into_iter()
2940 .collect(),
2941 ),
2942 ..Default::default()
2943 }))
2944 })
2945 .next()
2946 .await
2947 .unwrap();
2948 let mut transaction = response.await.unwrap().0;
2949 assert_eq!(transaction.len(), 2);
2950 assert_eq!(
2951 transaction
2952 .remove_entry(&buffer)
2953 .unwrap()
2954 .0
2955 .read_with(cx, |buffer, _| buffer.text()),
2956 "const THREE: usize = 1;"
2957 );
2958 assert_eq!(
2959 transaction
2960 .into_keys()
2961 .next()
2962 .unwrap()
2963 .read_with(cx, |buffer, _| buffer.text()),
2964 "const TWO: usize = one::THREE + one::THREE;"
2965 );
2966}
2967
2968#[gpui::test]
2969async fn test_search(cx: &mut gpui::TestAppContext) {
2970 let fs = FakeFs::new(cx.background());
2971 fs.insert_tree(
2972 "/dir",
2973 json!({
2974 "one.rs": "const ONE: usize = 1;",
2975 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
2976 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
2977 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
2978 }),
2979 )
2980 .await;
2981 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
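    // Initially, the search only matches the contents of the files on disk.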
2982 assert_eq!(
2983 search(&project, SearchQuery::text("TWO", false, true), cx)
2984 .await
2985 .unwrap(),
2986 HashMap::from_iter([
2987 ("two.rs".to_string(), vec![6..9]),
2988 ("three.rs".to_string(), vec![37..40])
2989 ])
2990 );
2991
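    // Edit an open buffer so that it contains matches that exist only in memory.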
2992 let buffer_4 = project
2993 .update(cx, |project, cx| {
2994 project.open_local_buffer("/dir/four.rs", cx)
2995 })
2996 .await
2997 .unwrap();
2998 buffer_4.update(cx, |buffer, cx| {
2999 let text = "two::TWO";
3000 buffer.edit([(20..28, text), (31..43, text)], cx);
3001 });
3002
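    // The search now reflects the unsaved buffer edits as well as the on-disk contents.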
3003 assert_eq!(
3004 search(&project, SearchQuery::text("TWO", false, true), cx)
3005 .await
3006 .unwrap(),
3007 HashMap::from_iter([
3008 ("two.rs".to_string(), vec![6..9]),
3009 ("three.rs".to_string(), vec![37..40]),
3010 ("four.rs".to_string(), vec![25..28, 36..39])
3011 ])
3012 );
3013
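    // Runs a project-wide search and returns the matched offset ranges keyed by file path.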
3014 async fn search(
3015 project: &ModelHandle<Project>,
3016 query: SearchQuery,
3017 cx: &mut gpui::TestAppContext,
3018 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3019 let results = project
3020 .update(cx, |project, cx| project.search(query, cx))
3021 .await?;
3022
3023 Ok(results
3024 .into_iter()
3025 .map(|(buffer, ranges)| {
3026 buffer.read_with(cx, |buffer, _| {
3027 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3028 let ranges = ranges
3029 .into_iter()
3030 .map(|range| range.to_offset(buffer))
3031 .collect::<Vec<_>>();
3032 (path, ranges)
3033 })
3034 })
3035 .collect())
3036 }
3037}