use crate::{worktree::WorktreeHandle, Event, *};
use fs::RealFs;
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe};
use language::{
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};
14
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
50
51#[gpui::test]
52async fn test_managing_language_servers(
53 deterministic: Arc<Deterministic>,
54 cx: &mut gpui::TestAppContext,
55) {
56 cx.foreground().forbid_parking();
57
58 let mut rust_language = Language::new(
59 LanguageConfig {
60 name: "Rust".into(),
61 path_suffixes: vec!["rs".to_string()],
62 ..Default::default()
63 },
64 Some(tree_sitter_rust::language()),
65 );
66 let mut json_language = Language::new(
67 LanguageConfig {
68 name: "JSON".into(),
69 path_suffixes: vec!["json".to_string()],
70 ..Default::default()
71 },
72 None,
73 );
74 let mut fake_rust_servers = rust_language
75 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
76 name: "the-rust-language-server",
77 capabilities: lsp::ServerCapabilities {
78 completion_provider: Some(lsp::CompletionOptions {
79 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
80 ..Default::default()
81 }),
82 ..Default::default()
83 },
84 ..Default::default()
85 }))
86 .await;
87 let mut fake_json_servers = json_language
88 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
89 name: "the-json-language-server",
90 capabilities: lsp::ServerCapabilities {
91 completion_provider: Some(lsp::CompletionOptions {
92 trigger_characters: Some(vec![":".to_string()]),
93 ..Default::default()
94 }),
95 ..Default::default()
96 },
97 ..Default::default()
98 }))
99 .await;
100
101 let fs = FakeFs::new(cx.background());
102 fs.insert_tree(
103 "/the-root",
104 json!({
105 "test.rs": "const A: i32 = 1;",
106 "test2.rs": "",
107 "Cargo.toml": "a = 1",
108 "package.json": "{\"a\": 1}",
109 }),
110 )
111 .await;
112
113 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
114
115 // Open a buffer without an associated language server.
116 let toml_buffer = project
117 .update(cx, |project, cx| {
118 project.open_local_buffer("/the-root/Cargo.toml", cx)
119 })
120 .await
121 .unwrap();
122
    // Open a buffer with an associated language server, before its language has been loaded.
124 let rust_buffer = project
125 .update(cx, |project, cx| {
126 project.open_local_buffer("/the-root/test.rs", cx)
127 })
128 .await
129 .unwrap();
130 rust_buffer.read_with(cx, |buffer, _| {
131 assert_eq!(buffer.language().map(|l| l.name()), None);
132 });
133
134 // Now we add the languages to the project, and ensure they get assigned to all
135 // the relevant open buffers.
136 project.update(cx, |project, _| {
137 project.languages.add(Arc::new(json_language));
138 project.languages.add(Arc::new(rust_language));
139 });
140 deterministic.run_until_parked();
141 rust_buffer.read_with(cx, |buffer, _| {
142 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
143 });
144
145 // A server is started up, and it is notified about Rust files.
146 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
147 assert_eq!(
148 fake_rust_server
149 .receive_notification::<lsp::notification::DidOpenTextDocument>()
150 .await
151 .text_document,
152 lsp::TextDocumentItem {
153 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
154 version: 0,
155 text: "const A: i32 = 1;".to_string(),
156 language_id: Default::default()
157 }
158 );
159
160 // The buffer is configured based on the language server's capabilities.
161 rust_buffer.read_with(cx, |buffer, _| {
162 assert_eq!(
163 buffer.completion_triggers(),
164 &[".".to_string(), "::".to_string()]
165 );
166 });
167 toml_buffer.read_with(cx, |buffer, _| {
168 assert!(buffer.completion_triggers().is_empty());
169 });
170
171 // Edit a buffer. The changes are reported to the language server.
172 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
173 assert_eq!(
174 fake_rust_server
175 .receive_notification::<lsp::notification::DidChangeTextDocument>()
176 .await
177 .text_document,
178 lsp::VersionedTextDocumentIdentifier::new(
179 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
180 1
181 )
182 );
183
184 // Open a third buffer with a different associated language server.
185 let json_buffer = project
186 .update(cx, |project, cx| {
187 project.open_local_buffer("/the-root/package.json", cx)
188 })
189 .await
190 .unwrap();
191
    // A JSON language server is started and is notified only about the JSON buffer.
193 let mut fake_json_server = fake_json_servers.next().await.unwrap();
194 assert_eq!(
195 fake_json_server
196 .receive_notification::<lsp::notification::DidOpenTextDocument>()
197 .await
198 .text_document,
199 lsp::TextDocumentItem {
200 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
201 version: 0,
202 text: "{\"a\": 1}".to_string(),
203 language_id: Default::default()
204 }
205 );
206
207 // This buffer is configured based on the second language server's
208 // capabilities.
209 json_buffer.read_with(cx, |buffer, _| {
210 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
211 });
212
213 // When opening another buffer whose language server is already running,
214 // it is also configured based on the existing language server's capabilities.
215 let rust_buffer2 = project
216 .update(cx, |project, cx| {
217 project.open_local_buffer("/the-root/test2.rs", cx)
218 })
219 .await
220 .unwrap();
221 rust_buffer2.read_with(cx, |buffer, _| {
222 assert_eq!(
223 buffer.completion_triggers(),
224 &[".".to_string(), "::".to_string()]
225 );
226 });
227
228 // Changes are reported only to servers matching the buffer's language.
229 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
230 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
231 assert_eq!(
232 fake_rust_server
233 .receive_notification::<lsp::notification::DidChangeTextDocument>()
234 .await
235 .text_document,
236 lsp::VersionedTextDocumentIdentifier::new(
237 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
238 1
239 )
240 );
241
242 // Save notifications are reported to all servers.
243 toml_buffer
244 .update(cx, |buffer, cx| buffer.save(cx))
245 .await
246 .unwrap();
247 assert_eq!(
248 fake_rust_server
249 .receive_notification::<lsp::notification::DidSaveTextDocument>()
250 .await
251 .text_document,
252 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
253 );
254 assert_eq!(
255 fake_json_server
256 .receive_notification::<lsp::notification::DidSaveTextDocument>()
257 .await
258 .text_document,
259 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
260 );
261
262 // Renames are reported only to servers matching the buffer's language.
263 fs.rename(
264 Path::new("/the-root/test2.rs"),
265 Path::new("/the-root/test3.rs"),
266 Default::default(),
267 )
268 .await
269 .unwrap();
270 assert_eq!(
271 fake_rust_server
272 .receive_notification::<lsp::notification::DidCloseTextDocument>()
273 .await
274 .text_document,
275 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
276 );
277 assert_eq!(
278 fake_rust_server
279 .receive_notification::<lsp::notification::DidOpenTextDocument>()
280 .await
281 .text_document,
282 lsp::TextDocumentItem {
283 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
284 version: 0,
285 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
286 language_id: Default::default()
287 },
288 );
289
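    // Attach a diagnostic to the buffer, so we can verify below that it gets
    // cleared when the buffer's language changes.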
290 rust_buffer2.update(cx, |buffer, cx| {
291 buffer.update_diagnostics(
292 DiagnosticSet::from_sorted_entries(
293 vec![DiagnosticEntry {
294 diagnostic: Default::default(),
295 range: Anchor::MIN..Anchor::MAX,
296 }],
297 &buffer.snapshot(),
298 ),
299 cx,
300 );
301 assert_eq!(
302 buffer
303 .snapshot()
304 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
305 .count(),
306 1
307 );
308 });
309
310 // When the rename changes the extension of the file, the buffer gets closed on the old
311 // language server and gets opened on the new one.
312 fs.rename(
313 Path::new("/the-root/test3.rs"),
314 Path::new("/the-root/test3.json"),
315 Default::default(),
316 )
317 .await
318 .unwrap();
319 assert_eq!(
320 fake_rust_server
321 .receive_notification::<lsp::notification::DidCloseTextDocument>()
322 .await
323 .text_document,
324 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
325 );
326 assert_eq!(
327 fake_json_server
328 .receive_notification::<lsp::notification::DidOpenTextDocument>()
329 .await
330 .text_document,
331 lsp::TextDocumentItem {
332 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
333 version: 0,
334 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
335 language_id: Default::default()
336 },
337 );
338
339 // We clear the diagnostics, since the language has changed.
340 rust_buffer2.read_with(cx, |buffer, _| {
341 assert_eq!(
342 buffer
343 .snapshot()
344 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
345 .count(),
346 0
347 );
348 });
349
350 // The renamed file's version resets after changing language server.
351 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
352 assert_eq!(
353 fake_json_server
354 .receive_notification::<lsp::notification::DidChangeTextDocument>()
355 .await
356 .text_document,
357 lsp::VersionedTextDocumentIdentifier::new(
358 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
359 1
360 )
361 );
362
363 // Restart language servers
364 project.update(cx, |project, cx| {
365 project.restart_language_servers_for_buffers(
366 vec![rust_buffer.clone(), json_buffer.clone()],
367 cx,
368 );
369 });
370
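    // Both the old Rust and JSON language servers should receive a shutdown request.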
371 let mut rust_shutdown_requests = fake_rust_server
372 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
373 let mut json_shutdown_requests = fake_json_server
374 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
375 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
376
377 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
378 let mut fake_json_server = fake_json_servers.next().await.unwrap();
379
    // Ensure the Rust document is reopened in the new Rust language server.
381 assert_eq!(
382 fake_rust_server
383 .receive_notification::<lsp::notification::DidOpenTextDocument>()
384 .await
385 .text_document,
386 lsp::TextDocumentItem {
387 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
388 version: 1,
389 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
390 language_id: Default::default()
391 }
392 );
393
    // Ensure the JSON documents are reopened in the new JSON language server.
395 assert_set_eq!(
396 [
397 fake_json_server
398 .receive_notification::<lsp::notification::DidOpenTextDocument>()
399 .await
400 .text_document,
401 fake_json_server
402 .receive_notification::<lsp::notification::DidOpenTextDocument>()
403 .await
404 .text_document,
405 ],
406 [
407 lsp::TextDocumentItem {
408 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
409 version: 0,
410 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
411 language_id: Default::default()
412 },
413 lsp::TextDocumentItem {
414 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
415 version: 1,
416 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
417 language_id: Default::default()
418 }
419 ]
420 );
421
422 // Close notifications are reported only to servers matching the buffer's language.
423 cx.update(|_| drop(json_buffer));
424 let close_message = lsp::DidCloseTextDocumentParams {
425 text_document: lsp::TextDocumentIdentifier::new(
426 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
427 ),
428 };
429 assert_eq!(
430 fake_json_server
431 .receive_notification::<lsp::notification::DidCloseTextDocument>()
432 .await,
433 close_message,
434 );
435}
436
437#[gpui::test]
438async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
439 cx.foreground().forbid_parking();
440
441 let fs = FakeFs::new(cx.background());
442 fs.insert_tree(
443 "/dir",
444 json!({
445 "a.rs": "let a = 1;",
446 "b.rs": "let b = 2;"
447 }),
448 )
449 .await;
450
451 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
452
453 let buffer_a = project
454 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
455 .await
456 .unwrap();
457 let buffer_b = project
458 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
459 .await
460 .unwrap();
461
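    // Report an error in the first file and a warning in the second.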
462 project.update(cx, |project, cx| {
463 project
464 .update_diagnostics(
465 0,
466 lsp::PublishDiagnosticsParams {
467 uri: Url::from_file_path("/dir/a.rs").unwrap(),
468 version: None,
469 diagnostics: vec![lsp::Diagnostic {
470 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
471 severity: Some(lsp::DiagnosticSeverity::ERROR),
472 message: "error 1".to_string(),
473 ..Default::default()
474 }],
475 },
476 &[],
477 cx,
478 )
479 .unwrap();
480 project
481 .update_diagnostics(
482 0,
483 lsp::PublishDiagnosticsParams {
484 uri: Url::from_file_path("/dir/b.rs").unwrap(),
485 version: None,
486 diagnostics: vec![lsp::Diagnostic {
487 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
488 severity: Some(lsp::DiagnosticSeverity::WARNING),
489 message: "error 2".to_string(),
490 ..Default::default()
491 }],
492 },
493 &[],
494 cx,
495 )
496 .unwrap();
497 });
498
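    // Each buffer should only contain the diagnostics reported for its own file.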
499 buffer_a.read_with(cx, |buffer, _| {
500 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
501 assert_eq!(
502 chunks
503 .iter()
504 .map(|(s, d)| (s.as_str(), *d))
505 .collect::<Vec<_>>(),
506 &[
507 ("let ", None),
508 ("a", Some(DiagnosticSeverity::ERROR)),
509 (" = 1;", None),
510 ]
511 );
512 });
513 buffer_b.read_with(cx, |buffer, _| {
514 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
515 assert_eq!(
516 chunks
517 .iter()
518 .map(|(s, d)| (s.as_str(), *d))
519 .collect::<Vec<_>>(),
520 &[
521 ("let ", None),
522 ("b", Some(DiagnosticSeverity::WARNING)),
523 (" = 2;", None),
524 ]
525 );
526 });
527}
528
529#[gpui::test]
530async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
531 cx.foreground().forbid_parking();
532
533 let fs = FakeFs::new(cx.background());
534 fs.insert_tree(
535 "/root",
536 json!({
537 "dir": {
538 "a.rs": "let a = 1;",
539 },
540 "other.rs": "let b = c;"
541 }),
542 )
543 .await;
544
545 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
546
547 let (worktree, _) = project
548 .update(cx, |project, cx| {
549 project.find_or_create_local_worktree("/root/other.rs", false, cx)
550 })
551 .await
552 .unwrap();
553 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
554
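    // Report a diagnostic for the file in the hidden (non-visible) worktree.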
555 project.update(cx, |project, cx| {
556 project
557 .update_diagnostics(
558 0,
559 lsp::PublishDiagnosticsParams {
560 uri: Url::from_file_path("/root/other.rs").unwrap(),
561 version: None,
562 diagnostics: vec![lsp::Diagnostic {
563 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
564 severity: Some(lsp::DiagnosticSeverity::ERROR),
565 message: "unknown variable 'c'".to_string(),
566 ..Default::default()
567 }],
568 },
569 &[],
570 cx,
571 )
572 .unwrap();
573 });
574
575 let buffer = project
576 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
577 .await
578 .unwrap();
579 buffer.read_with(cx, |buffer, _| {
580 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
581 assert_eq!(
582 chunks
583 .iter()
584 .map(|(s, d)| (s.as_str(), *d))
585 .collect::<Vec<_>>(),
586 &[
587 ("let b = ", None),
588 ("c", Some(DiagnosticSeverity::ERROR)),
589 (";", None),
590 ]
591 );
592 });
593
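    // Hidden worktrees should not contribute to the project's diagnostic summaries.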
594 project.read_with(cx, |project, cx| {
595 assert_eq!(project.diagnostic_summaries(cx).next(), None);
596 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
597 });
598}
599
600#[gpui::test]
601async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
602 cx.foreground().forbid_parking();
603
604 let progress_token = "the-progress-token";
605 let mut language = Language::new(
606 LanguageConfig {
607 name: "Rust".into(),
608 path_suffixes: vec!["rs".to_string()],
609 ..Default::default()
610 },
611 Some(tree_sitter_rust::language()),
612 );
613 let mut fake_servers = language
614 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
615 disk_based_diagnostics_progress_token: Some(progress_token.into()),
616 disk_based_diagnostics_sources: vec!["disk".into()],
617 ..Default::default()
618 }))
619 .await;
620
621 let fs = FakeFs::new(cx.background());
622 fs.insert_tree(
623 "/dir",
624 json!({
625 "a.rs": "fn a() { A }",
626 "b.rs": "const y: i32 = 1",
627 }),
628 )
629 .await;
630
631 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
632 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
633 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
634
635 // Cause worktree to start the fake language server
636 let _buffer = project
637 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
638 .await
639 .unwrap();
640
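    // Subscribe to project events in order to observe the disk-based diagnostics lifecycle.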
641 let mut events = subscribe(&project, cx);
642
643 let fake_server = fake_servers.next().await.unwrap();
644 fake_server.start_progress(progress_token).await;
645 assert_eq!(
646 events.next().await.unwrap(),
647 Event::DiskBasedDiagnosticsStarted {
648 language_server_id: 0,
649 }
650 );
651
652 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
653 uri: Url::from_file_path("/dir/a.rs").unwrap(),
654 version: None,
655 diagnostics: vec![lsp::Diagnostic {
656 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
657 severity: Some(lsp::DiagnosticSeverity::ERROR),
658 message: "undefined variable 'A'".to_string(),
659 ..Default::default()
660 }],
661 });
662 assert_eq!(
663 events.next().await.unwrap(),
664 Event::DiagnosticsUpdated {
665 language_server_id: 0,
666 path: (worktree_id, Path::new("a.rs")).into()
667 }
668 );
669
670 fake_server.end_progress(progress_token);
671 assert_eq!(
672 events.next().await.unwrap(),
673 Event::DiskBasedDiagnosticsFinished {
674 language_server_id: 0
675 }
676 );
677
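    // Open the file the diagnostics were reported for, and verify that they
    // were stored on the buffer.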
678 let buffer = project
679 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
680 .await
681 .unwrap();
682
683 buffer.read_with(cx, |buffer, _| {
684 let snapshot = buffer.snapshot();
685 let diagnostics = snapshot
686 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
687 .collect::<Vec<_>>();
688 assert_eq!(
689 diagnostics,
690 &[DiagnosticEntry {
691 range: Point::new(0, 9)..Point::new(0, 10),
692 diagnostic: Diagnostic {
693 severity: lsp::DiagnosticSeverity::ERROR,
694 message: "undefined variable 'A'".to_string(),
695 group_id: 0,
696 is_primary: true,
697 ..Default::default()
698 }
699 }]
700 )
701 });
702
703 // Ensure publishing empty diagnostics twice only results in one update event.
704 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
705 uri: Url::from_file_path("/dir/a.rs").unwrap(),
706 version: None,
707 diagnostics: Default::default(),
708 });
709 assert_eq!(
710 events.next().await.unwrap(),
711 Event::DiagnosticsUpdated {
712 language_server_id: 0,
713 path: (worktree_id, Path::new("a.rs")).into()
714 }
715 );
716
717 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
718 uri: Url::from_file_path("/dir/a.rs").unwrap(),
719 version: None,
720 diagnostics: Default::default(),
721 });
722 cx.foreground().run_until_parked();
723 assert_eq!(futures::poll!(events.next()), Poll::Pending);
724}
725
726#[gpui::test]
727async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
728 cx.foreground().forbid_parking();
729
730 let progress_token = "the-progress-token";
731 let mut language = Language::new(
732 LanguageConfig {
733 path_suffixes: vec!["rs".to_string()],
734 ..Default::default()
735 },
736 None,
737 );
738 let mut fake_servers = language
739 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
740 disk_based_diagnostics_sources: vec!["disk".into()],
741 disk_based_diagnostics_progress_token: Some(progress_token.into()),
742 ..Default::default()
743 }))
744 .await;
745
746 let fs = FakeFs::new(cx.background());
747 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
748
749 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
750 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
751
752 let buffer = project
753 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
754 .await
755 .unwrap();
756
757 // Simulate diagnostics starting to update.
758 let fake_server = fake_servers.next().await.unwrap();
759 fake_server.start_progress(progress_token).await;
760
761 // Restart the server before the diagnostics finish updating.
762 project.update(cx, |project, cx| {
763 project.restart_language_servers_for_buffers([buffer], cx);
764 });
765 let mut events = subscribe(&project, cx);
766
767 // Simulate the newly started server sending more diagnostics.
768 let fake_server = fake_servers.next().await.unwrap();
769 fake_server.start_progress(progress_token).await;
770 assert_eq!(
771 events.next().await.unwrap(),
772 Event::DiskBasedDiagnosticsStarted {
773 language_server_id: 1
774 }
775 );
776 project.read_with(cx, |project, _| {
777 assert_eq!(
778 project
779 .language_servers_running_disk_based_diagnostics()
780 .collect::<Vec<_>>(),
781 [1]
782 );
783 });
784
785 // All diagnostics are considered done, despite the old server's diagnostic
786 // task never completing.
787 fake_server.end_progress(progress_token);
788 assert_eq!(
789 events.next().await.unwrap(),
790 Event::DiskBasedDiagnosticsFinished {
791 language_server_id: 1
792 }
793 );
794 project.read_with(cx, |project, _| {
795 assert_eq!(
796 project
797 .language_servers_running_disk_based_diagnostics()
798 .collect::<Vec<_>>(),
799 [0; 0]
800 );
801 });
802}
803
804#[gpui::test]
805async fn test_toggling_enable_language_server(
806 deterministic: Arc<Deterministic>,
807 cx: &mut gpui::TestAppContext,
808) {
809 deterministic.forbid_parking();
810
811 let mut rust = Language::new(
812 LanguageConfig {
813 name: Arc::from("Rust"),
814 path_suffixes: vec!["rs".to_string()],
815 ..Default::default()
816 },
817 None,
818 );
819 let mut fake_rust_servers = rust
820 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
821 name: "rust-lsp",
822 ..Default::default()
823 }))
824 .await;
825 let mut js = Language::new(
826 LanguageConfig {
827 name: Arc::from("JavaScript"),
828 path_suffixes: vec!["js".to_string()],
829 ..Default::default()
830 },
831 None,
832 );
833 let mut fake_js_servers = js
834 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
835 name: "js-lsp",
836 ..Default::default()
837 }))
838 .await;
839
840 let fs = FakeFs::new(cx.background());
841 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
842 .await;
843
844 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
845 project.update(cx, |project, _| {
846 project.languages.add(Arc::new(rust));
847 project.languages.add(Arc::new(js));
848 });
849
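    // Open one buffer per language, causing both language servers to start.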
850 let _rs_buffer = project
851 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
852 .await
853 .unwrap();
854 let _js_buffer = project
855 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
856 .await
857 .unwrap();
858
859 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
860 assert_eq!(
861 fake_rust_server_1
862 .receive_notification::<lsp::notification::DidOpenTextDocument>()
863 .await
864 .text_document
865 .uri
866 .as_str(),
867 "file:///dir/a.rs"
868 );
869
870 let mut fake_js_server = fake_js_servers.next().await.unwrap();
871 assert_eq!(
872 fake_js_server
873 .receive_notification::<lsp::notification::DidOpenTextDocument>()
874 .await
875 .text_document
876 .uri
877 .as_str(),
878 "file:///dir/b.js"
879 );
880
    // Disable the Rust language server, ensuring that only that server is stopped.
882 cx.update(|cx| {
883 cx.update_global(|settings: &mut Settings, _| {
884 settings.language_overrides.insert(
885 Arc::from("Rust"),
886 settings::EditorSettings {
887 enable_language_server: Some(false),
888 ..Default::default()
889 },
890 );
891 })
892 });
893 fake_rust_server_1
894 .receive_notification::<lsp::notification::Exit>()
895 .await;
896
897 // Enable Rust and disable JavaScript language servers, ensuring that the
898 // former gets started again and that the latter stops.
899 cx.update(|cx| {
900 cx.update_global(|settings: &mut Settings, _| {
901 settings.language_overrides.insert(
902 Arc::from("Rust"),
903 settings::EditorSettings {
904 enable_language_server: Some(true),
905 ..Default::default()
906 },
907 );
908 settings.language_overrides.insert(
909 Arc::from("JavaScript"),
910 settings::EditorSettings {
911 enable_language_server: Some(false),
912 ..Default::default()
913 },
914 );
915 })
916 });
917 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
918 assert_eq!(
919 fake_rust_server_2
920 .receive_notification::<lsp::notification::DidOpenTextDocument>()
921 .await
922 .text_document
923 .uri
924 .as_str(),
925 "file:///dir/a.rs"
926 );
927 fake_js_server
928 .receive_notification::<lsp::notification::Exit>()
929 .await;
930}
931
932#[gpui::test]
933async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
934 cx.foreground().forbid_parking();
935
936 let mut language = Language::new(
937 LanguageConfig {
938 name: "Rust".into(),
939 path_suffixes: vec!["rs".to_string()],
940 ..Default::default()
941 },
942 Some(tree_sitter_rust::language()),
943 );
944 let mut fake_servers = language
945 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
946 disk_based_diagnostics_sources: vec!["disk".into()],
947 ..Default::default()
948 }))
949 .await;
950
951 let text = "
952 fn a() { A }
953 fn b() { BB }
954 fn c() { CCC }
955 "
956 .unindent();
957
958 let fs = FakeFs::new(cx.background());
959 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
960
961 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
962 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
963
964 let buffer = project
965 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
966 .await
967 .unwrap();
968
969 let mut fake_server = fake_servers.next().await.unwrap();
970 let open_notification = fake_server
971 .receive_notification::<lsp::notification::DidOpenTextDocument>()
972 .await;
973
974 // Edit the buffer, moving the content down
975 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
976 let change_notification_1 = fake_server
977 .receive_notification::<lsp::notification::DidChangeTextDocument>()
978 .await;
979 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
980
981 // Report some diagnostics for the initial version of the buffer
982 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
983 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
984 version: Some(open_notification.text_document.version),
985 diagnostics: vec![
986 lsp::Diagnostic {
987 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
988 severity: Some(DiagnosticSeverity::ERROR),
989 message: "undefined variable 'A'".to_string(),
990 source: Some("disk".to_string()),
991 ..Default::default()
992 },
993 lsp::Diagnostic {
994 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
995 severity: Some(DiagnosticSeverity::ERROR),
996 message: "undefined variable 'BB'".to_string(),
997 source: Some("disk".to_string()),
998 ..Default::default()
999 },
1000 lsp::Diagnostic {
1001 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1002 severity: Some(DiagnosticSeverity::ERROR),
1003 source: Some("disk".to_string()),
1004 message: "undefined variable 'CCC'".to_string(),
1005 ..Default::default()
1006 },
1007 ],
1008 });
1009
1010 // The diagnostics have moved down since they were created.
1011 buffer.next_notification(cx).await;
1012 buffer.read_with(cx, |buffer, _| {
1013 assert_eq!(
1014 buffer
1015 .snapshot()
1016 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1017 .collect::<Vec<_>>(),
1018 &[
1019 DiagnosticEntry {
1020 range: Point::new(3, 9)..Point::new(3, 11),
1021 diagnostic: Diagnostic {
1022 severity: DiagnosticSeverity::ERROR,
1023 message: "undefined variable 'BB'".to_string(),
1024 is_disk_based: true,
1025 group_id: 1,
1026 is_primary: true,
1027 ..Default::default()
1028 },
1029 },
1030 DiagnosticEntry {
1031 range: Point::new(4, 9)..Point::new(4, 12),
1032 diagnostic: Diagnostic {
1033 severity: DiagnosticSeverity::ERROR,
1034 message: "undefined variable 'CCC'".to_string(),
1035 is_disk_based: true,
1036 group_id: 2,
1037 is_primary: true,
1038 ..Default::default()
1039 }
1040 }
1041 ]
1042 );
1043 assert_eq!(
1044 chunks_with_diagnostics(buffer, 0..buffer.len()),
1045 [
1046 ("\n\nfn a() { ".to_string(), None),
1047 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1048 (" }\nfn b() { ".to_string(), None),
1049 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1050 (" }\nfn c() { ".to_string(), None),
1051 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1052 (" }\n".to_string(), None),
1053 ]
1054 );
1055 assert_eq!(
1056 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1057 [
1058 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1059 (" }\nfn c() { ".to_string(), None),
1060 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1061 ]
1062 );
1063 });
1064
1065 // Ensure overlapping diagnostics are highlighted correctly.
1066 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1067 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1068 version: Some(open_notification.text_document.version),
1069 diagnostics: vec![
1070 lsp::Diagnostic {
1071 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1072 severity: Some(DiagnosticSeverity::ERROR),
1073 message: "undefined variable 'A'".to_string(),
1074 source: Some("disk".to_string()),
1075 ..Default::default()
1076 },
1077 lsp::Diagnostic {
1078 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1079 severity: Some(DiagnosticSeverity::WARNING),
1080 message: "unreachable statement".to_string(),
1081 source: Some("disk".to_string()),
1082 ..Default::default()
1083 },
1084 ],
1085 });
1086
1087 buffer.next_notification(cx).await;
1088 buffer.read_with(cx, |buffer, _| {
1089 assert_eq!(
1090 buffer
1091 .snapshot()
1092 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1093 .collect::<Vec<_>>(),
1094 &[
1095 DiagnosticEntry {
1096 range: Point::new(2, 9)..Point::new(2, 12),
1097 diagnostic: Diagnostic {
1098 severity: DiagnosticSeverity::WARNING,
1099 message: "unreachable statement".to_string(),
1100 is_disk_based: true,
1101 group_id: 4,
1102 is_primary: true,
1103 ..Default::default()
1104 }
1105 },
1106 DiagnosticEntry {
1107 range: Point::new(2, 9)..Point::new(2, 10),
1108 diagnostic: Diagnostic {
1109 severity: DiagnosticSeverity::ERROR,
1110 message: "undefined variable 'A'".to_string(),
1111 is_disk_based: true,
1112 group_id: 3,
1113 is_primary: true,
1114 ..Default::default()
1115 },
1116 }
1117 ]
1118 );
1119 assert_eq!(
1120 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1121 [
1122 ("fn a() { ".to_string(), None),
1123 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1124 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1125 ("\n".to_string(), None),
1126 ]
1127 );
1128 assert_eq!(
1129 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1130 [
1131 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1132 ("\n".to_string(), None),
1133 ]
1134 );
1135 });
1136
1137 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1138 // changes since the last save.
1139 buffer.update(cx, |buffer, cx| {
1140 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
1141 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
1142 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
1143 });
1144 let change_notification_2 = fake_server
1145 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1146 .await;
1147 assert!(
1148 change_notification_2.text_document.version > change_notification_1.text_document.version
1149 );
1150
1151 // Handle out-of-order diagnostics
1152 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1153 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1154 version: Some(change_notification_2.text_document.version),
1155 diagnostics: vec![
1156 lsp::Diagnostic {
1157 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1158 severity: Some(DiagnosticSeverity::ERROR),
1159 message: "undefined variable 'BB'".to_string(),
1160 source: Some("disk".to_string()),
1161 ..Default::default()
1162 },
1163 lsp::Diagnostic {
1164 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1165 severity: Some(DiagnosticSeverity::WARNING),
1166 message: "undefined variable 'A'".to_string(),
1167 source: Some("disk".to_string()),
1168 ..Default::default()
1169 },
1170 ],
1171 });
1172
1173 buffer.next_notification(cx).await;
1174 buffer.read_with(cx, |buffer, _| {
1175 assert_eq!(
1176 buffer
1177 .snapshot()
1178 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1179 .collect::<Vec<_>>(),
1180 &[
1181 DiagnosticEntry {
1182 range: Point::new(2, 21)..Point::new(2, 22),
1183 diagnostic: Diagnostic {
1184 severity: DiagnosticSeverity::WARNING,
1185 message: "undefined variable 'A'".to_string(),
1186 is_disk_based: true,
1187 group_id: 6,
1188 is_primary: true,
1189 ..Default::default()
1190 }
1191 },
1192 DiagnosticEntry {
1193 range: Point::new(3, 9)..Point::new(3, 14),
1194 diagnostic: Diagnostic {
1195 severity: DiagnosticSeverity::ERROR,
1196 message: "undefined variable 'BB'".to_string(),
1197 is_disk_based: true,
1198 group_id: 5,
1199 is_primary: true,
1200 ..Default::default()
1201 },
1202 }
1203 ]
1204 );
1205 });
1206}
1207
1208#[gpui::test]
1209async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1210 cx.foreground().forbid_parking();
1211
1212 let text = concat!(
1213 "let one = ;\n", //
1214 "let two = \n",
1215 "let three = 3;\n",
1216 );
1217
1218 let fs = FakeFs::new(cx.background());
1219 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1220
1221 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1222 let buffer = project
1223 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1224 .await
1225 .unwrap();
1226
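    // Attach two zero-length diagnostics: one just before the trailing semicolon
    // on the first line, and one at the very end of the second line.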
1227 project.update(cx, |project, cx| {
1228 project
1229 .update_buffer_diagnostics(
1230 &buffer,
1231 vec![
1232 DiagnosticEntry {
1233 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
1234 diagnostic: Diagnostic {
1235 severity: DiagnosticSeverity::ERROR,
1236 message: "syntax error 1".to_string(),
1237 ..Default::default()
1238 },
1239 },
1240 DiagnosticEntry {
1241 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
1242 diagnostic: Diagnostic {
1243 severity: DiagnosticSeverity::ERROR,
1244 message: "syntax error 2".to_string(),
1245 ..Default::default()
1246 },
1247 },
1248 ],
1249 None,
1250 cx,
1251 )
1252 .unwrap();
1253 });
1254
1255 // An empty range is extended forward to include the following character.
1256 // At the end of a line, an empty range is extended backward to include
1257 // the preceding character.
1258 buffer.read_with(cx, |buffer, _| {
1259 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
1260 assert_eq!(
1261 chunks
1262 .iter()
1263 .map(|(s, d)| (s.as_str(), *d))
1264 .collect::<Vec<_>>(),
1265 &[
1266 ("let one = ", None),
1267 (";", Some(DiagnosticSeverity::ERROR)),
1268 ("\nlet two =", None),
1269 (" ", Some(DiagnosticSeverity::ERROR)),
1270 ("\nlet three = 3;\n", None)
1271 ]
1272 );
1273 });
1274}
1275
1276#[gpui::test]
1277async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1278 cx.foreground().forbid_parking();
1279
1280 let mut language = Language::new(
1281 LanguageConfig {
1282 name: "Rust".into(),
1283 path_suffixes: vec!["rs".to_string()],
1284 ..Default::default()
1285 },
1286 Some(tree_sitter_rust::language()),
1287 );
1288 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1289
1290 let text = "
1291 fn a() {
1292 f1();
1293 }
1294 fn b() {
1295 f2();
1296 }
1297 fn c() {
1298 f3();
1299 }
1300 "
1301 .unindent();
1302
1303 let fs = FakeFs::new(cx.background());
1304 fs.insert_tree(
1305 "/dir",
1306 json!({
1307 "a.rs": text.clone(),
1308 }),
1309 )
1310 .await;
1311
1312 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1313 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1314 let buffer = project
1315 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1316 .await
1317 .unwrap();
1318
1319 let mut fake_server = fake_servers.next().await.unwrap();
1320 let lsp_document_version = fake_server
1321 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1322 .await
1323 .text_document
1324 .version;
1325
1326 // Simulate editing the buffer after the language server computes some edits.
1327 buffer.update(cx, |buffer, cx| {
1328 buffer.edit(
1329 [(
1330 Point::new(0, 0)..Point::new(0, 0),
1331 "// above first function\n",
1332 )],
1333 cx,
1334 );
1335 buffer.edit(
1336 [(
1337 Point::new(2, 0)..Point::new(2, 0),
1338 " // inside first function\n",
1339 )],
1340 cx,
1341 );
1342 buffer.edit(
1343 [(
1344 Point::new(6, 4)..Point::new(6, 4),
1345 "// inside second function ",
1346 )],
1347 cx,
1348 );
1349
1350 assert_eq!(
1351 buffer.text(),
1352 "
1353 // above first function
1354 fn a() {
1355 // inside first function
1356 f1();
1357 }
1358 fn b() {
1359 // inside second function f2();
1360 }
1361 fn c() {
1362 f3();
1363 }
1364 "
1365 .unindent()
1366 );
1367 });
1368
1369 let edits = project
1370 .update(cx, |project, cx| {
1371 project.edits_from_lsp(
1372 &buffer,
1373 vec![
1374 // replace body of first function
1375 lsp::TextEdit {
1376 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1377 new_text: "
1378 fn a() {
1379 f10();
1380 }
1381 "
1382 .unindent(),
1383 },
1384 // edit inside second function
1385 lsp::TextEdit {
1386 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1387 new_text: "00".into(),
1388 },
1389 // edit inside third function via two distinct edits
1390 lsp::TextEdit {
1391 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1392 new_text: "4000".into(),
1393 },
1394 lsp::TextEdit {
1395 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1396 new_text: "".into(),
1397 },
1398 ],
1399 Some(lsp_document_version),
1400 cx,
1401 )
1402 })
1403 .await
1404 .unwrap();
1405
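    // Apply the returned edits. Although the buffer has changed since the version
    // the server referred to, the edits should land in the intended locations.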
1406 buffer.update(cx, |buffer, cx| {
1407 for (range, new_text) in edits {
1408 buffer.edit([(range, new_text)], cx);
1409 }
1410 assert_eq!(
1411 buffer.text(),
1412 "
1413 // above first function
1414 fn a() {
1415 // inside first function
1416 f10();
1417 }
1418 fn b() {
1419 // inside second function f200();
1420 }
1421 fn c() {
1422 f4000();
1423 }
1424 "
1425 .unindent()
1426 );
1427 });
1428}
1429
1430#[gpui::test]
1431async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1432 cx.foreground().forbid_parking();
1433
1434 let text = "
1435 use a::b;
1436 use a::c;
1437
1438 fn f() {
1439 b();
1440 c();
1441 }
1442 "
1443 .unindent();
1444
1445 let fs = FakeFs::new(cx.background());
1446 fs.insert_tree(
1447 "/dir",
1448 json!({
1449 "a.rs": text.clone(),
1450 }),
1451 )
1452 .await;
1453
1454 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1455 let buffer = project
1456 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1457 .await
1458 .unwrap();
1459
1460 // Simulate the language server sending us a small edit in the form of a very large diff.
1461 // Rust-analyzer does this when performing a merge-imports code action.
1462 let edits = project
1463 .update(cx, |project, cx| {
1464 project.edits_from_lsp(
1465 &buffer,
1466 [
1467 // Replace the first use statement without editing the semicolon.
1468 lsp::TextEdit {
1469 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1470 new_text: "a::{b, c}".into(),
1471 },
1472 // Reinsert the remainder of the file between the semicolon and the final
1473 // newline of the file.
1474 lsp::TextEdit {
1475 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1476 new_text: "\n\n".into(),
1477 },
1478 lsp::TextEdit {
1479 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1480 new_text: "
1481 fn f() {
1482 b();
1483 c();
1484 }"
1485 .unindent(),
1486 },
1487 // Delete everything after the first newline of the file.
1488 lsp::TextEdit {
1489 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1490 new_text: "".into(),
1491 },
1492 ],
1493 None,
1494 cx,
1495 )
1496 })
1497 .await
1498 .unwrap();
1499
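    // The interleaved LSP edits should be coalesced into a minimal set of buffer edits.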
1500 buffer.update(cx, |buffer, cx| {
1501 let edits = edits
1502 .into_iter()
1503 .map(|(range, text)| {
1504 (
1505 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1506 text,
1507 )
1508 })
1509 .collect::<Vec<_>>();
1510
1511 assert_eq!(
1512 edits,
1513 [
1514 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1515 (Point::new(1, 0)..Point::new(2, 0), "".into())
1516 ]
1517 );
1518
1519 for (range, new_text) in edits {
1520 buffer.edit([(range, new_text)], cx);
1521 }
1522 assert_eq!(
1523 buffer.text(),
1524 "
1525 use a::{b, c};
1526
1527 fn f() {
1528 b();
1529 c();
1530 }
1531 "
1532 .unindent()
1533 );
1534 });
1535}
1536
1537#[gpui::test]
1538async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1539 cx.foreground().forbid_parking();
1540
1541 let text = "
1542 use a::b;
1543 use a::c;
1544
1545 fn f() {
1546 b();
1547 c();
1548 }
1549 "
1550 .unindent();
1551
1552 let fs = FakeFs::new(cx.background());
1553 fs.insert_tree(
1554 "/dir",
1555 json!({
1556 "a.rs": text.clone(),
1557 }),
1558 )
1559 .await;
1560
1561 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1562 let buffer = project
1563 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1564 .await
1565 .unwrap();
1566
    // Simulate the language server sending us edits out of order, with some
    // ranges inverted (end position before start position).
1569 let edits = project
1570 .update(cx, |project, cx| {
1571 project.edits_from_lsp(
1572 &buffer,
1573 [
1574 lsp::TextEdit {
1575 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1576 new_text: "\n\n".into(),
1577 },
1578 lsp::TextEdit {
1579 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1580 new_text: "a::{b, c}".into(),
1581 },
1582 lsp::TextEdit {
1583 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1584 new_text: "".into(),
1585 },
1586 lsp::TextEdit {
1587 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1588 new_text: "
1589 fn f() {
1590 b();
1591 c();
1592 }"
1593 .unindent(),
1594 },
1595 ],
1596 None,
1597 cx,
1598 )
1599 })
1600 .await
1601 .unwrap();
1602
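    // The unordered and inverted ranges should still be normalized into a sane,
    // ordered set of edits.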
1603 buffer.update(cx, |buffer, cx| {
1604 let edits = edits
1605 .into_iter()
1606 .map(|(range, text)| {
1607 (
1608 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1609 text,
1610 )
1611 })
1612 .collect::<Vec<_>>();
1613
1614 assert_eq!(
1615 edits,
1616 [
1617 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1618 (Point::new(1, 0)..Point::new(2, 0), "".into())
1619 ]
1620 );
1621
1622 for (range, new_text) in edits {
1623 buffer.edit([(range, new_text)], cx);
1624 }
1625 assert_eq!(
1626 buffer.text(),
1627 "
1628 use a::{b, c};
1629
1630 fn f() {
1631 b();
1632 c();
1633 }
1634 "
1635 .unindent()
1636 );
1637 });
1638}
1639
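// Collects the buffer's chunks in the given range, coalescing adjacent chunks that
// share the same diagnostic severity. For example, "let a = 1;" with an error on "a"
// yields [("let ", None), ("a", Some(ERROR)), (" = 1;", None)].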
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}
1656
1657#[gpui::test(iterations = 10)]
1658async fn test_definition(cx: &mut gpui::TestAppContext) {
1659 let mut language = Language::new(
1660 LanguageConfig {
1661 name: "Rust".into(),
1662 path_suffixes: vec!["rs".to_string()],
1663 ..Default::default()
1664 },
1665 Some(tree_sitter_rust::language()),
1666 );
1667 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1668
1669 let fs = FakeFs::new(cx.background());
1670 fs.insert_tree(
1671 "/dir",
1672 json!({
1673 "a.rs": "const fn a() { A }",
1674 "b.rs": "const y: i32 = crate::a()",
1675 }),
1676 )
1677 .await;
1678
1679 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1680 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1681
1682 let buffer = project
1683 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1684 .await
1685 .unwrap();
1686
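    // Respond to the definition request with a location in "a.rs", a file that
    // lies outside of the project's current worktree.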
1687 let fake_server = fake_servers.next().await.unwrap();
1688 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1689 let params = params.text_document_position_params;
1690 assert_eq!(
1691 params.text_document.uri.to_file_path().unwrap(),
1692 Path::new("/dir/b.rs"),
1693 );
1694 assert_eq!(params.position, lsp::Position::new(0, 22));
1695
1696 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1697 lsp::Location::new(
1698 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1699 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1700 ),
1701 )))
1702 });
1703
1704 let mut definitions = project
1705 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1706 .await
1707 .unwrap();
1708
    // Assert that no new language server was started.
1710 cx.foreground().run_until_parked();
1711 assert!(fake_servers.try_next().is_err());
1712
1713 assert_eq!(definitions.len(), 1);
1714 let definition = definitions.pop().unwrap();
1715 cx.update(|cx| {
1716 let target_buffer = definition.target.buffer.read(cx);
1717 assert_eq!(
1718 target_buffer
1719 .file()
1720 .unwrap()
1721 .as_local()
1722 .unwrap()
1723 .abs_path(cx),
1724 Path::new("/dir/a.rs"),
1725 );
1726 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1727 assert_eq!(
1728 list_worktrees(&project, cx),
1729 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1730 );
1731
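        // Dropping the definition should release the invisible worktree that was
        // created to hold the target buffer.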
1732 drop(definition);
1733 });
1734 cx.read(|cx| {
1735 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1736 });
1737
1738 fn list_worktrees<'a>(
1739 project: &'a ModelHandle<Project>,
1740 cx: &'a AppContext,
1741 ) -> Vec<(&'a Path, bool)> {
1742 project
1743 .read(cx)
1744 .worktrees(cx)
1745 .map(|worktree| {
1746 let worktree = worktree.read(cx);
1747 (
1748 worktree.as_local().unwrap().abs_path().as_ref(),
1749 worktree.is_visible(),
1750 )
1751 })
1752 .collect::<Vec<_>>()
1753 }
1754}
1755
1756#[gpui::test]
1757async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1758 let mut language = Language::new(
1759 LanguageConfig {
1760 name: "TypeScript".into(),
1761 path_suffixes: vec!["ts".to_string()],
1762 ..Default::default()
1763 },
1764 Some(tree_sitter_typescript::language_typescript()),
1765 );
1766 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1767
1768 let fs = FakeFs::new(cx.background());
1769 fs.insert_tree(
1770 "/dir",
1771 json!({
1772 "a.ts": "",
1773 }),
1774 )
1775 .await;
1776
1777 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1778 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1779 let buffer = project
1780 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1781 .await
1782 .unwrap();
1783
1784 let fake_server = fake_language_servers.next().await.unwrap();
1785
1786 let text = "let a = b.fqn";
1787 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1788 let completions = project.update(cx, |project, cx| {
1789 project.completions(&buffer, text.len(), cx)
1790 });
1791
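    // The completion item provides no edit range, so the word preceding the
    // cursor ("fqn") should be replaced.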
1792 fake_server
1793 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1794 Ok(Some(lsp::CompletionResponse::Array(vec![
1795 lsp::CompletionItem {
1796 label: "fullyQualifiedName?".into(),
1797 insert_text: Some("fullyQualifiedName".into()),
1798 ..Default::default()
1799 },
1800 ])))
1801 })
1802 .next()
1803 .await;
1804 let completions = completions.await.unwrap();
1805 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1806 assert_eq!(completions.len(), 1);
1807 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1808 assert_eq!(
1809 completions[0].old_range.to_offset(&snapshot),
1810 text.len() - 3..text.len()
1811 );
1812
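    // Inside a string literal, only the partial token before the cursor ("cmp")
    // should be replaced.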
1813 let text = "let a = \"atoms/cmp\"";
1814 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1815 let completions = project.update(cx, |project, cx| {
1816 project.completions(&buffer, text.len() - 1, cx)
1817 });
1818
1819 fake_server
1820 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1821 Ok(Some(lsp::CompletionResponse::Array(vec![
1822 lsp::CompletionItem {
1823 label: "component".into(),
1824 ..Default::default()
1825 },
1826 ])))
1827 })
1828 .next()
1829 .await;
1830 let completions = completions.await.unwrap();
1831 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1832 assert_eq!(completions.len(), 1);
1833 assert_eq!(completions[0].new_text, "component");
1834 assert_eq!(
1835 completions[0].old_range.to_offset(&snapshot),
1836 text.len() - 4..text.len() - 1
1837 );
1838}
1839
1840#[gpui::test]
1841async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1842 let mut language = Language::new(
1843 LanguageConfig {
1844 name: "TypeScript".into(),
1845 path_suffixes: vec!["ts".to_string()],
1846 ..Default::default()
1847 },
1848 Some(tree_sitter_typescript::language_typescript()),
1849 );
1850 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1851
1852 let fs = FakeFs::new(cx.background());
1853 fs.insert_tree(
1854 "/dir",
1855 json!({
1856 "a.ts": "",
1857 }),
1858 )
1859 .await;
1860
1861 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1862 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1863 let buffer = project
1864 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1865 .await
1866 .unwrap();
1867
1868 let fake_server = fake_language_servers.next().await.unwrap();
1869
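    // Carriage returns in the completion's text should be normalized to newlines.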
1870 let text = "let a = b.fqn";
1871 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1872 let completions = project.update(cx, |project, cx| {
1873 project.completions(&buffer, text.len(), cx)
1874 });
1875
1876 fake_server
1877 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1878 Ok(Some(lsp::CompletionResponse::Array(vec![
1879 lsp::CompletionItem {
1880 label: "fullyQualifiedName?".into(),
1881 insert_text: Some("fully\rQualified\r\nName".into()),
1882 ..Default::default()
1883 },
1884 ])))
1885 })
1886 .next()
1887 .await;
1888 let completions = completions.await.unwrap();
1889 assert_eq!(completions.len(), 1);
1890 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1891}
1892
1893#[gpui::test(iterations = 10)]
1894async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1895 let mut language = Language::new(
1896 LanguageConfig {
1897 name: "TypeScript".into(),
1898 path_suffixes: vec!["ts".to_string()],
1899 ..Default::default()
1900 },
1901 None,
1902 );
1903 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1904
1905 let fs = FakeFs::new(cx.background());
1906 fs.insert_tree(
1907 "/dir",
1908 json!({
1909 "a.ts": "a",
1910 }),
1911 )
1912 .await;
1913
1914 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1915 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1916 let buffer = project
1917 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1918 .await
1919 .unwrap();
1920
1921 let fake_server = fake_language_servers.next().await.unwrap();
1922
    // The language server returns code actions that contain commands rather than edits.
1924 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1925 fake_server
1926 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1927 Ok(Some(vec![
1928 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1929 title: "The code action".into(),
1930 command: Some(lsp::Command {
1931 title: "The command".into(),
1932 command: "_the/command".into(),
1933 arguments: Some(vec![json!("the-argument")]),
1934 }),
1935 ..Default::default()
1936 }),
1937 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1938 title: "two".into(),
1939 ..Default::default()
1940 }),
1941 ]))
1942 })
1943 .next()
1944 .await;
1945
1946 let action = actions.await.unwrap()[0].clone();
1947 let apply = project.update(cx, |project, cx| {
1948 project.apply_code_action(buffer.clone(), action, true, cx)
1949 });
1950
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute its command instead.
1953 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
1954 |action, _| async move { Ok(action) },
1955 );
1956
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
1959 fake_server
1960 .handle_request::<lsp::request::ExecuteCommand, _, _>({
1961 let fake = fake_server.clone();
1962 move |params, _| {
1963 assert_eq!(params.command, "_the/command");
1964 let fake = fake.clone();
1965 async move {
1966 fake.server
1967 .request::<lsp::request::ApplyWorkspaceEdit>(
1968 lsp::ApplyWorkspaceEditParams {
1969 label: None,
1970 edit: lsp::WorkspaceEdit {
1971 changes: Some(
1972 [(
1973 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
1974 vec![lsp::TextEdit {
1975 range: lsp::Range::new(
1976 lsp::Position::new(0, 0),
1977 lsp::Position::new(0, 0),
1978 ),
1979 new_text: "X".into(),
1980 }],
1981 )]
1982 .into_iter()
1983 .collect(),
1984 ),
1985 ..Default::default()
1986 },
1987 },
1988 )
1989 .await
1990 .unwrap();
1991 Ok(Some(json!(null)))
1992 }
1993 }
1994 })
1995 .next()
1996 .await;
1997
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2000 let transaction = apply.await.unwrap();
2001 assert!(transaction.0.contains_key(&buffer));
2002 buffer.update(cx, |buffer, cx| {
2003 assert_eq!(buffer.text(), "Xa");
2004 buffer.undo(cx);
2005 assert_eq!(buffer.text(), "a");
2006 });
2007}
2008
2009#[gpui::test]
2010async fn test_save_file(cx: &mut gpui::TestAppContext) {
2011 let fs = FakeFs::new(cx.background());
2012 fs.insert_tree(
2013 "/dir",
2014 json!({
2015 "file1": "the old contents",
2016 }),
2017 )
2018 .await;
2019
2020 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2021 let buffer = project
2022 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2023 .await
2024 .unwrap();
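    // Edit the buffer, inserting a large block of text, and save it to disk.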
2025 buffer
2026 .update(cx, |buffer, cx| {
2027 assert_eq!(buffer.text(), "the old contents");
2028 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2029 buffer.save(cx)
2030 })
2031 .await
2032 .unwrap();
2033
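    // The contents on disk now match the buffer's text.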
2034 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2035 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2036}
2037
2038#[gpui::test]
2039async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2040 let fs = FakeFs::new(cx.background());
2041 fs.insert_tree(
2042 "/dir",
2043 json!({
2044 "file1": "the old contents",
2045 }),
2046 )
2047 .await;
2048
2049 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2050 let buffer = project
2051 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2052 .await
2053 .unwrap();
2054 buffer
2055 .update(cx, |buffer, cx| {
2056 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2057 buffer.save(cx)
2058 })
2059 .await
2060 .unwrap();
2061
2062 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2063 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2064}
2065
2066#[gpui::test]
2067async fn test_save_as(cx: &mut gpui::TestAppContext) {
2068 let fs = FakeFs::new(cx.background());
2069 fs.insert_tree("/dir", json!({})).await;
2070
2071 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
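    // Create an untitled buffer and make an edit, leaving it dirty.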
2072 let buffer = project.update(cx, |project, cx| {
2073 project.create_buffer("", None, cx).unwrap()
2074 });
2075 buffer.update(cx, |buffer, cx| {
2076 buffer.edit([(0..0, "abc")], cx);
2077 assert!(buffer.is_dirty());
2078 assert!(!buffer.has_conflict());
2079 });
2080 project
2081 .update(cx, |project, cx| {
2082 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2083 })
2084 .await
2085 .unwrap();
2086 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2087 buffer.read_with(cx, |buffer, cx| {
2088 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2089 assert!(!buffer.is_dirty());
2090 assert!(!buffer.has_conflict());
2091 });
2092
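    // Opening the buffer's new path returns the same buffer that was just saved.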
2093 let opened_buffer = project
2094 .update(cx, |project, cx| {
2095 project.open_local_buffer("/dir/file1", cx)
2096 })
2097 .await
2098 .unwrap();
2099 assert_eq!(opened_buffer, buffer);
2100}
2101
2102#[gpui::test(retries = 5)]
2103async fn test_rescan_and_remote_updates(
2104 deterministic: Arc<Deterministic>,
2105 cx: &mut gpui::TestAppContext,
2106) {
2107 let dir = temp_tree(json!({
2108 "a": {
2109 "file1": "",
2110 "file2": "",
2111 "file3": "",
2112 },
2113 "b": {
2114 "c": {
2115 "file4": "",
2116 "file5": "",
2117 }
2118 }
2119 }));
2120
2121 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2122 let rpc = project.read_with(cx, |p, _| p.client.clone());
2123
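    // Helpers for opening a buffer at a path and for looking up a worktree entry's id.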
2124 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2125 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2126 async move { buffer.await.unwrap() }
2127 };
2128 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2129 project.read_with(cx, |project, cx| {
2130 let tree = project.worktrees(cx).next().unwrap();
2131 tree.read(cx)
2132 .entry_for_path(path)
2133 .expect(&format!("no entry for path {}", path))
2134 .id
2135 })
2136 };
2137
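    // Open some buffers and record their entry ids before mutating the filesystem.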
2138 let buffer2 = buffer_for_path("a/file2", cx).await;
2139 let buffer3 = buffer_for_path("a/file3", cx).await;
2140 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2141 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2142
2143 let file2_id = id_for_path("a/file2", &cx);
2144 let file3_id = id_for_path("a/file3", &cx);
2145 let file4_id = id_for_path("b/c/file4", &cx);
2146
2147 // Create a remote copy of this worktree.
2148 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2149 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2150 let remote = cx.update(|cx| {
2151 Worktree::remote(
2152 1,
2153 1,
2154 proto::WorktreeMetadata {
2155 id: initial_snapshot.id().to_proto(),
2156 root_name: initial_snapshot.root_name().into(),
2157 visible: true,
2158 },
2159 rpc.clone(),
2160 cx,
2161 )
2162 });
2163 remote.update(cx, |remote, _| {
2164 let update = initial_snapshot.build_initial_update(1);
2165 remote.as_remote_mut().unwrap().update_from_remote(update);
2166 });
2167 deterministic.run_until_parked();
2168
2169 cx.read(|cx| {
2170 assert!(!buffer2.read(cx).is_dirty());
2171 assert!(!buffer3.read(cx).is_dirty());
2172 assert!(!buffer4.read(cx).is_dirty());
2173 assert!(!buffer5.read(cx).is_dirty());
2174 });
2175
2176 // Rename and delete files and directories.
2177 tree.flush_fs_events(&cx).await;
2178 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2179 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2180 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2181 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2182 tree.flush_fs_events(&cx).await;
2183
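    // After rescanning, the worktree reflects the renames and deletions; open buffers
    // follow their files to the new paths, and deleted files are marked as such.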
2184 let expected_paths = vec![
2185 "a",
2186 "a/file1",
2187 "a/file2.new",
2188 "b",
2189 "d",
2190 "d/file3",
2191 "d/file4",
2192 ];
2193
2194 cx.read(|app| {
2195 assert_eq!(
2196 tree.read(app)
2197 .paths()
2198 .map(|p| p.to_str().unwrap())
2199 .collect::<Vec<_>>(),
2200 expected_paths
2201 );
2202
2203 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
2204 assert_eq!(id_for_path("d/file3", &cx), file3_id);
2205 assert_eq!(id_for_path("d/file4", &cx), file4_id);
2206
2207 assert_eq!(
2208 buffer2.read(app).file().unwrap().path().as_ref(),
2209 Path::new("a/file2.new")
2210 );
2211 assert_eq!(
2212 buffer3.read(app).file().unwrap().path().as_ref(),
2213 Path::new("d/file3")
2214 );
2215 assert_eq!(
2216 buffer4.read(app).file().unwrap().path().as_ref(),
2217 Path::new("d/file4")
2218 );
2219 assert_eq!(
2220 buffer5.read(app).file().unwrap().path().as_ref(),
2221 Path::new("b/c/file5")
2222 );
2223
2224 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2225 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2226 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2227 assert!(buffer5.read(app).file().unwrap().is_deleted());
2228 });
2229
2230 // Update the remote worktree. Check that it becomes consistent with the
2231 // local worktree.
2232 remote.update(cx, |remote, cx| {
2233 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2234 &initial_snapshot,
2235 1,
2236 1,
2237 true,
2238 );
2239 remote.as_remote_mut().unwrap().update_from_remote(update);
2240 });
2241 deterministic.run_until_parked();
2242 remote.read_with(cx, |remote, _| {
2243 assert_eq!(
2244 remote
2245 .paths()
2246 .map(|p| p.to_str().unwrap())
2247 .collect::<Vec<_>>(),
2248 expected_paths
2249 );
2250 });
2251}
2252
2253#[gpui::test]
2254async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2255 let fs = FakeFs::new(cx.background());
2256 fs.insert_tree(
2257 "/dir",
2258 json!({
2259 "a.txt": "a-contents",
2260 "b.txt": "b-contents",
2261 }),
2262 )
2263 .await;
2264
2265 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2266
2267 // Spawn multiple tasks to open paths, repeating some paths.
2268 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2269 (
2270 p.open_local_buffer("/dir/a.txt", cx),
2271 p.open_local_buffer("/dir/b.txt", cx),
2272 p.open_local_buffer("/dir/a.txt", cx),
2273 )
2274 });
2275
2276 let buffer_a_1 = buffer_a_1.await.unwrap();
2277 let buffer_a_2 = buffer_a_2.await.unwrap();
2278 let buffer_b = buffer_b.await.unwrap();
2279 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2280 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2281
2282 // There is only one buffer per path.
2283 let buffer_a_id = buffer_a_1.id();
2284 assert_eq!(buffer_a_2.id(), buffer_a_id);
2285
2286 // Open the same path again while it is still open.
2287 drop(buffer_a_1);
2288 let buffer_a_3 = project
2289 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2290 .await
2291 .unwrap();
2292
2293 // There's still only one buffer per path.
2294 assert_eq!(buffer_a_3.id(), buffer_a_id);
2295}
2296
2297#[gpui::test]
2298async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2299 let fs = FakeFs::new(cx.background());
2300 fs.insert_tree(
2301 "/dir",
2302 json!({
2303 "file1": "abc",
2304 "file2": "def",
2305 "file3": "ghi",
2306 }),
2307 )
2308 .await;
2309
2310 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2311
2312 let buffer1 = project
2313 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2314 .await
2315 .unwrap();
2316 let events = Rc::new(RefCell::new(Vec::new()));
2317
    // Initially, the buffer isn't dirty.
2319 buffer1.update(cx, |buffer, cx| {
2320 cx.subscribe(&buffer1, {
2321 let events = events.clone();
2322 move |_, _, event, _| match event {
2323 BufferEvent::Operation(_) => {}
2324 _ => events.borrow_mut().push(event.clone()),
2325 }
2326 })
2327 .detach();
2328
2329 assert!(!buffer.is_dirty());
2330 assert!(events.borrow().is_empty());
2331
2332 buffer.edit([(1..2, "")], cx);
2333 });
2334
    // After the first edit, the buffer is dirty and emits Edited and DirtyChanged events.
2336 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2338 assert!(buffer.is_dirty());
2339 assert_eq!(
2340 *events.borrow(),
2341 &[language::Event::Edited, language::Event::DirtyChanged]
2342 );
2343 events.borrow_mut().clear();
2344 buffer.did_save(
2345 buffer.version(),
2346 buffer.as_rope().fingerprint(),
2347 buffer.file().unwrap().mtime(),
2348 None,
2349 cx,
2350 );
2351 });
2352
    // After saving, the buffer is no longer dirty and emits a Saved event.
2354 buffer1.update(cx, |buffer, cx| {
2355 assert!(!buffer.is_dirty());
2356 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2357 events.borrow_mut().clear();
2358
2359 buffer.edit([(1..1, "B")], cx);
2360 buffer.edit([(2..2, "D")], cx);
2361 });
2362
    // After editing again, the buffer is dirty and emits Edited and DirtyChanged events.
2364 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2366 assert!(buffer.is_dirty());
2367 assert_eq!(
2368 *events.borrow(),
2369 &[
2370 language::Event::Edited,
2371 language::Event::DirtyChanged,
2372 language::Event::Edited,
2373 ],
2374 );
2375 events.borrow_mut().clear();
2376
2377 // After restoring the buffer to its previously-saved state,
2378 // the buffer is not considered dirty anymore.
2379 buffer.edit([(1..3, "")], cx);
        assert_eq!(buffer.text(), "ac");
2381 assert!(!buffer.is_dirty());
2382 });
2383
2384 assert_eq!(
2385 *events.borrow(),
2386 &[language::Event::Edited, language::Event::DirtyChanged]
2387 );
2388
2389 // When a file is deleted, the buffer is considered dirty.
2390 let events = Rc::new(RefCell::new(Vec::new()));
2391 let buffer2 = project
2392 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2393 .await
2394 .unwrap();
2395 buffer2.update(cx, |_, cx| {
2396 cx.subscribe(&buffer2, {
2397 let events = events.clone();
2398 move |_, _, event, _| events.borrow_mut().push(event.clone())
2399 })
2400 .detach();
2401 });
2402
2403 fs.remove_file("/dir/file2".as_ref(), Default::default())
2404 .await
2405 .unwrap();
2406 cx.foreground().run_until_parked();
2407 assert_eq!(
2408 *events.borrow(),
2409 &[
2410 language::Event::DirtyChanged,
2411 language::Event::FileHandleChanged
2412 ]
2413 );
2414
    // When a file that is already dirty is deleted, we don't emit an additional DirtyChanged event.
2416 let events = Rc::new(RefCell::new(Vec::new()));
2417 let buffer3 = project
2418 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2419 .await
2420 .unwrap();
2421 buffer3.update(cx, |_, cx| {
2422 cx.subscribe(&buffer3, {
2423 let events = events.clone();
2424 move |_, _, event, _| events.borrow_mut().push(event.clone())
2425 })
2426 .detach();
2427 });
2428
2429 buffer3.update(cx, |buffer, cx| {
2430 buffer.edit([(0..0, "x")], cx);
2431 });
2432 events.borrow_mut().clear();
2433 fs.remove_file("/dir/file3".as_ref(), Default::default())
2434 .await
2435 .unwrap();
2436 cx.foreground().run_until_parked();
2437 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2438 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2439}
2440
2441#[gpui::test]
2442async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2443 let initial_contents = "aaa\nbbbbb\nc\n";
2444 let fs = FakeFs::new(cx.background());
2445 fs.insert_tree(
2446 "/dir",
2447 json!({
2448 "the-file": initial_contents,
2449 }),
2450 )
2451 .await;
2452 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2453 let buffer = project
2454 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2455 .await
2456 .unwrap();
2457
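    // Place anchors in the buffer so we can verify how they move when the file
    // is reloaded from disk.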
2458 let anchors = (0..3)
2459 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2460 .collect::<Vec<_>>();
2461
2462 // Change the file on disk, adding two new lines of text, and removing
2463 // one line.
2464 buffer.read_with(cx, |buffer, _| {
2465 assert!(!buffer.is_dirty());
2466 assert!(!buffer.has_conflict());
2467 });
2468 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2469 fs.save(
2470 "/dir/the-file".as_ref(),
2471 &new_contents.into(),
2472 LineEnding::Unix,
2473 )
2474 .await
2475 .unwrap();
2476
2477 // Because the buffer was not modified, it is reloaded from disk. Its
2478 // contents are edited according to the diff between the old and new
2479 // file contents.
2480 cx.foreground().run_until_parked();
2481 buffer.update(cx, |buffer, _| {
2482 assert_eq!(buffer.text(), new_contents);
2483 assert!(!buffer.is_dirty());
2484 assert!(!buffer.has_conflict());
2485
2486 let anchor_positions = anchors
2487 .iter()
2488 .map(|anchor| anchor.to_point(&*buffer))
2489 .collect::<Vec<_>>();
2490 assert_eq!(
2491 anchor_positions,
2492 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
2493 );
2494 });
2495
2496 // Modify the buffer
2497 buffer.update(cx, |buffer, cx| {
2498 buffer.edit([(0..0, " ")], cx);
2499 assert!(buffer.is_dirty());
2500 assert!(!buffer.has_conflict());
2501 });
2502
2503 // Change the file on disk again, adding blank lines to the beginning.
2504 fs.save(
2505 "/dir/the-file".as_ref(),
2506 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2507 LineEnding::Unix,
2508 )
2509 .await
2510 .unwrap();
2511
2512 // Because the buffer is modified, it doesn't reload from disk, but is
2513 // marked as having a conflict.
2514 cx.foreground().run_until_parked();
2515 buffer.read_with(cx, |buffer, _| {
2516 assert!(buffer.has_conflict());
2517 });
2518}
2519
2520#[gpui::test]
2521async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2522 let fs = FakeFs::new(cx.background());
2523 fs.insert_tree(
2524 "/dir",
2525 json!({
2526 "file1": "a\nb\nc\n",
2527 "file2": "one\r\ntwo\r\nthree\r\n",
2528 }),
2529 )
2530 .await;
2531
2532 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2533 let buffer1 = project
2534 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2535 .await
2536 .unwrap();
2537 let buffer2 = project
2538 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2539 .await
2540 .unwrap();
2541
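    // CRLF line endings are normalized to `\n` in the buffer's text, and the
    // original line ending style is recorded on the buffer.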
2542 buffer1.read_with(cx, |buffer, _| {
2543 assert_eq!(buffer.text(), "a\nb\nc\n");
2544 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2545 });
2546 buffer2.read_with(cx, |buffer, _| {
2547 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2548 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2549 });
2550
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2553 fs.save(
2554 "/dir/file1".as_ref(),
2555 &"aaa\nb\nc\n".into(),
2556 LineEnding::Windows,
2557 )
2558 .await
2559 .unwrap();
2560 cx.foreground().run_until_parked();
2561 buffer1.read_with(cx, |buffer, _| {
2562 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2563 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2564 });
2565
    // Save a file with Windows line endings. The file is written correctly.
2567 buffer2
2568 .update(cx, |buffer, cx| {
2569 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2570 buffer.save(cx)
2571 })
2572 .await
2573 .unwrap();
2574 assert_eq!(
2575 fs.load("/dir/file2".as_ref()).await.unwrap(),
2576 "one\r\ntwo\r\nthree\r\nfour\r\n",
2577 );
2578}
2579
2580#[gpui::test]
2581async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2582 cx.foreground().forbid_parking();
2583
2584 let fs = FakeFs::new(cx.background());
2585 fs.insert_tree(
2586 "/the-dir",
2587 json!({
2588 "a.rs": "
2589 fn foo(mut v: Vec<usize>) {
2590 for x in &v {
2591 v.push(1);
2592 }
2593 }
2594 "
2595 .unindent(),
2596 }),
2597 )
2598 .await;
2599
2600 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2601 let buffer = project
2602 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2603 .await
2604 .unwrap();
2605
2606 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
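    // Construct a publishDiagnostics message in which primary diagnostics and
    // their hints reference each other via relatedInformation.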
2607 let message = lsp::PublishDiagnosticsParams {
2608 uri: buffer_uri.clone(),
2609 diagnostics: vec![
2610 lsp::Diagnostic {
2611 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2612 severity: Some(DiagnosticSeverity::WARNING),
2613 message: "error 1".to_string(),
2614 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2615 location: lsp::Location {
2616 uri: buffer_uri.clone(),
2617 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2618 },
2619 message: "error 1 hint 1".to_string(),
2620 }]),
2621 ..Default::default()
2622 },
2623 lsp::Diagnostic {
2624 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2625 severity: Some(DiagnosticSeverity::HINT),
2626 message: "error 1 hint 1".to_string(),
2627 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2628 location: lsp::Location {
2629 uri: buffer_uri.clone(),
2630 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2631 },
2632 message: "original diagnostic".to_string(),
2633 }]),
2634 ..Default::default()
2635 },
2636 lsp::Diagnostic {
2637 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2638 severity: Some(DiagnosticSeverity::ERROR),
2639 message: "error 2".to_string(),
2640 related_information: Some(vec![
2641 lsp::DiagnosticRelatedInformation {
2642 location: lsp::Location {
2643 uri: buffer_uri.clone(),
2644 range: lsp::Range::new(
2645 lsp::Position::new(1, 13),
2646 lsp::Position::new(1, 15),
2647 ),
2648 },
2649 message: "error 2 hint 1".to_string(),
2650 },
2651 lsp::DiagnosticRelatedInformation {
2652 location: lsp::Location {
2653 uri: buffer_uri.clone(),
2654 range: lsp::Range::new(
2655 lsp::Position::new(1, 13),
2656 lsp::Position::new(1, 15),
2657 ),
2658 },
2659 message: "error 2 hint 2".to_string(),
2660 },
2661 ]),
2662 ..Default::default()
2663 },
2664 lsp::Diagnostic {
2665 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2666 severity: Some(DiagnosticSeverity::HINT),
2667 message: "error 2 hint 1".to_string(),
2668 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2669 location: lsp::Location {
2670 uri: buffer_uri.clone(),
2671 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2672 },
2673 message: "original diagnostic".to_string(),
2674 }]),
2675 ..Default::default()
2676 },
2677 lsp::Diagnostic {
2678 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2679 severity: Some(DiagnosticSeverity::HINT),
2680 message: "error 2 hint 2".to_string(),
2681 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2682 location: lsp::Location {
2683 uri: buffer_uri.clone(),
2684 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2685 },
2686 message: "original diagnostic".to_string(),
2687 }]),
2688 ..Default::default()
2689 },
2690 ],
2691 version: None,
2692 };
2693
2694 project
2695 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2696 .unwrap();
2697 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2698
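    // Diagnostics linked via relatedInformation share a group id, and only the
    // original diagnostic in each group is marked as primary.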
2699 assert_eq!(
2700 buffer
2701 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2702 .collect::<Vec<_>>(),
2703 &[
2704 DiagnosticEntry {
2705 range: Point::new(1, 8)..Point::new(1, 9),
2706 diagnostic: Diagnostic {
2707 severity: DiagnosticSeverity::WARNING,
2708 message: "error 1".to_string(),
2709 group_id: 0,
2710 is_primary: true,
2711 ..Default::default()
2712 }
2713 },
2714 DiagnosticEntry {
2715 range: Point::new(1, 8)..Point::new(1, 9),
2716 diagnostic: Diagnostic {
2717 severity: DiagnosticSeverity::HINT,
2718 message: "error 1 hint 1".to_string(),
2719 group_id: 0,
2720 is_primary: false,
2721 ..Default::default()
2722 }
2723 },
2724 DiagnosticEntry {
2725 range: Point::new(1, 13)..Point::new(1, 15),
2726 diagnostic: Diagnostic {
2727 severity: DiagnosticSeverity::HINT,
2728 message: "error 2 hint 1".to_string(),
2729 group_id: 1,
2730 is_primary: false,
2731 ..Default::default()
2732 }
2733 },
2734 DiagnosticEntry {
2735 range: Point::new(1, 13)..Point::new(1, 15),
2736 diagnostic: Diagnostic {
2737 severity: DiagnosticSeverity::HINT,
2738 message: "error 2 hint 2".to_string(),
2739 group_id: 1,
2740 is_primary: false,
2741 ..Default::default()
2742 }
2743 },
2744 DiagnosticEntry {
2745 range: Point::new(2, 8)..Point::new(2, 17),
2746 diagnostic: Diagnostic {
2747 severity: DiagnosticSeverity::ERROR,
2748 message: "error 2".to_string(),
2749 group_id: 1,
2750 is_primary: true,
2751 ..Default::default()
2752 }
2753 }
2754 ]
2755 );
2756
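    // Each diagnostic group can also be retrieved individually by its group id.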
2757 assert_eq!(
2758 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2759 &[
2760 DiagnosticEntry {
2761 range: Point::new(1, 8)..Point::new(1, 9),
2762 diagnostic: Diagnostic {
2763 severity: DiagnosticSeverity::WARNING,
2764 message: "error 1".to_string(),
2765 group_id: 0,
2766 is_primary: true,
2767 ..Default::default()
2768 }
2769 },
2770 DiagnosticEntry {
2771 range: Point::new(1, 8)..Point::new(1, 9),
2772 diagnostic: Diagnostic {
2773 severity: DiagnosticSeverity::HINT,
2774 message: "error 1 hint 1".to_string(),
2775 group_id: 0,
2776 is_primary: false,
2777 ..Default::default()
2778 }
2779 },
2780 ]
2781 );
2782 assert_eq!(
2783 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2784 &[
2785 DiagnosticEntry {
2786 range: Point::new(1, 13)..Point::new(1, 15),
2787 diagnostic: Diagnostic {
2788 severity: DiagnosticSeverity::HINT,
2789 message: "error 2 hint 1".to_string(),
2790 group_id: 1,
2791 is_primary: false,
2792 ..Default::default()
2793 }
2794 },
2795 DiagnosticEntry {
2796 range: Point::new(1, 13)..Point::new(1, 15),
2797 diagnostic: Diagnostic {
2798 severity: DiagnosticSeverity::HINT,
2799 message: "error 2 hint 2".to_string(),
2800 group_id: 1,
2801 is_primary: false,
2802 ..Default::default()
2803 }
2804 },
2805 DiagnosticEntry {
2806 range: Point::new(2, 8)..Point::new(2, 17),
2807 diagnostic: Diagnostic {
2808 severity: DiagnosticSeverity::ERROR,
2809 message: "error 2".to_string(),
2810 group_id: 1,
2811 is_primary: true,
2812 ..Default::default()
2813 }
2814 }
2815 ]
2816 );
2817}
2818
2819#[gpui::test]
2820async fn test_rename(cx: &mut gpui::TestAppContext) {
2821 cx.foreground().forbid_parking();
2822
2823 let mut language = Language::new(
2824 LanguageConfig {
2825 name: "Rust".into(),
2826 path_suffixes: vec!["rs".to_string()],
2827 ..Default::default()
2828 },
2829 Some(tree_sitter_rust::language()),
2830 );
2831 let mut fake_servers = language
2832 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2833 capabilities: lsp::ServerCapabilities {
2834 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2835 prepare_provider: Some(true),
2836 work_done_progress_options: Default::default(),
2837 })),
2838 ..Default::default()
2839 },
2840 ..Default::default()
2841 }))
2842 .await;
2843
2844 let fs = FakeFs::new(cx.background());
2845 fs.insert_tree(
2846 "/dir",
2847 json!({
2848 "one.rs": "const ONE: usize = 1;",
2849 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2850 }),
2851 )
2852 .await;
2853
2854 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2855 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2856 let buffer = project
2857 .update(cx, |project, cx| {
2858 project.open_local_buffer("/dir/one.rs", cx)
2859 })
2860 .await
2861 .unwrap();
2862
2863 let fake_server = fake_servers.next().await.unwrap();
2864
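    // Prepare a rename at an offset inside the symbol `ONE`. The fake server
    // responds with the symbol's full range.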
2865 let response = project.update(cx, |project, cx| {
2866 project.prepare_rename(buffer.clone(), 7, cx)
2867 });
2868 fake_server
2869 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2870 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2871 assert_eq!(params.position, lsp::Position::new(0, 7));
2872 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2873 lsp::Position::new(0, 6),
2874 lsp::Position::new(0, 9),
2875 ))))
2876 })
2877 .next()
2878 .await
2879 .unwrap();
2880 let range = response.await.unwrap().unwrap();
2881 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2882 assert_eq!(range, 6..9);
2883
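    // Perform the rename. The fake server responds with a workspace edit
    // spanning both files.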
2884 let response = project.update(cx, |project, cx| {
2885 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2886 });
2887 fake_server
2888 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2889 assert_eq!(
2890 params.text_document_position.text_document.uri.as_str(),
2891 "file:///dir/one.rs"
2892 );
2893 assert_eq!(
2894 params.text_document_position.position,
2895 lsp::Position::new(0, 7)
2896 );
2897 assert_eq!(params.new_name, "THREE");
2898 Ok(Some(lsp::WorkspaceEdit {
2899 changes: Some(
2900 [
2901 (
2902 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2903 vec![lsp::TextEdit::new(
2904 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2905 "THREE".to_string(),
2906 )],
2907 ),
2908 (
2909 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2910 vec![
2911 lsp::TextEdit::new(
2912 lsp::Range::new(
2913 lsp::Position::new(0, 24),
2914 lsp::Position::new(0, 27),
2915 ),
2916 "THREE".to_string(),
2917 ),
2918 lsp::TextEdit::new(
2919 lsp::Range::new(
2920 lsp::Position::new(0, 35),
2921 lsp::Position::new(0, 38),
2922 ),
2923 "THREE".to_string(),
2924 ),
2925 ],
2926 ),
2927 ]
2928 .into_iter()
2929 .collect(),
2930 ),
2931 ..Default::default()
2932 }))
2933 })
2934 .next()
2935 .await
2936 .unwrap();
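    // The returned project transaction contains one entry per edited buffer.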
2937 let mut transaction = response.await.unwrap().0;
2938 assert_eq!(transaction.len(), 2);
2939 assert_eq!(
2940 transaction
2941 .remove_entry(&buffer)
2942 .unwrap()
2943 .0
2944 .read_with(cx, |buffer, _| buffer.text()),
2945 "const THREE: usize = 1;"
2946 );
2947 assert_eq!(
2948 transaction
2949 .into_keys()
2950 .next()
2951 .unwrap()
2952 .read_with(cx, |buffer, _| buffer.text()),
2953 "const TWO: usize = one::THREE + one::THREE;"
2954 );
2955}
2956
2957#[gpui::test]
2958async fn test_search(cx: &mut gpui::TestAppContext) {
2959 let fs = FakeFs::new(cx.background());
2960 fs.insert_tree(
2961 "/dir",
2962 json!({
2963 "one.rs": "const ONE: usize = 1;",
2964 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
2965 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
2966 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
2967 }),
2968 )
2969 .await;
2970 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2971 assert_eq!(
2972 search(&project, SearchQuery::text("TWO", false, true), cx)
2973 .await
2974 .unwrap(),
2975 HashMap::from_iter([
2976 ("two.rs".to_string(), vec![6..9]),
2977 ("three.rs".to_string(), vec![37..40])
2978 ])
2979 );
2980
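    // Edit an open buffer to introduce additional matches. Subsequent searches
    // reflect the buffer's unsaved contents.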
2981 let buffer_4 = project
2982 .update(cx, |project, cx| {
2983 project.open_local_buffer("/dir/four.rs", cx)
2984 })
2985 .await
2986 .unwrap();
2987 buffer_4.update(cx, |buffer, cx| {
2988 let text = "two::TWO";
2989 buffer.edit([(20..28, text), (31..43, text)], cx);
2990 });
2991
2992 assert_eq!(
2993 search(&project, SearchQuery::text("TWO", false, true), cx)
2994 .await
2995 .unwrap(),
2996 HashMap::from_iter([
2997 ("two.rs".to_string(), vec![6..9]),
2998 ("three.rs".to_string(), vec![37..40]),
2999 ("four.rs".to_string(), vec![25..28, 36..39])
3000 ])
3001 );
3002
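    // Runs a project-wide search and collects the results as a map from file
    // path to match offset ranges.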
3003 async fn search(
3004 project: &ModelHandle<Project>,
3005 query: SearchQuery,
3006 cx: &mut gpui::TestAppContext,
3007 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3008 let results = project
3009 .update(cx, |project, cx| project.search(query, cx))
3010 .await?;
3011
3012 Ok(results
3013 .into_iter()
3014 .map(|(buffer, ranges)| {
3015 buffer.read_with(cx, |buffer, _| {
3016 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3017 let ranges = ranges
3018 .into_iter()
3019 .map(|range| range.to_offset(buffer))
3020 .collect::<Vec<_>>();
3021 (path, ranges)
3022 })
3023 })
3024 .collect())
3025 }
3026}