1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::{executor::Deterministic, test::subscribe};
6use language::{
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use serde_json::json;
12use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
13use unindent::Unindent as _;
14use util::{assert_set_eq, test::temp_tree};
15
16#[gpui::test]
17async fn test_symlinks(cx: &mut gpui::TestAppContext) {
18 let dir = temp_tree(json!({
19 "root": {
20 "apple": "",
21 "banana": {
22 "carrot": {
23 "date": "",
24 "endive": "",
25 }
26 },
27 "fennel": {
28 "grape": "",
29 }
30 }
31 }));
32
33 let root_link_path = dir.path().join("root_link");
34 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
35 unix::fs::symlink(
36 &dir.path().join("root/fennel"),
37 &dir.path().join("root/finnochio"),
38 )
39 .unwrap();
40
41 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
42 project.read_with(cx, |project, cx| {
43 let tree = project.worktrees(cx).next().unwrap().read(cx);
44 assert_eq!(tree.file_count(), 5);
45 assert_eq!(
46 tree.inode_for_path("fennel/grape"),
47 tree.inode_for_path("finnochio/grape")
48 );
49 });
50}
51
// Exercises the full lifecycle of language servers in a project: lazy language
// assignment to already-open buffers, per-language server startup, routing of
// open/change/save/close notifications, handling of renames (including
// renames that change a file's language), server restarts, and buffer drops.
// The assertions depend on the exact order in which the fake servers receive
// notifications.
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    cx.foreground().forbid_parking();

    // Two languages, each backed by its own fake language server, so we can
    // observe exactly which server receives which LSP notifications.
    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    // Each fake adapter advertises distinct completion trigger characters so
    // we can later check that buffers are configured from the right server.
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    // No language is registered yet, so the buffer has none assigned.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it has no completion triggers.
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust server hears about the rust edit; the toml edit produces
    // no DidChange notification on either server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    toml_buffer
        .update(cx, |buffer, cx| buffer.save(cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename appears to the server as close-then-reopen.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed the buffer with a diagnostic so we can verify it is cleared when
    // the buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a Shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (the two DidOpen notifications may arrive in either order, hence
    // the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
439
440#[gpui::test]
441async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
442 cx.foreground().forbid_parking();
443
444 let fs = FakeFs::new(cx.background());
445 fs.insert_tree(
446 "/dir",
447 json!({
448 "a.rs": "let a = 1;",
449 "b.rs": "let b = 2;"
450 }),
451 )
452 .await;
453
454 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
455
456 let buffer_a = project
457 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
458 .await
459 .unwrap();
460 let buffer_b = project
461 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
462 .await
463 .unwrap();
464
465 project.update(cx, |project, cx| {
466 project
467 .update_diagnostics(
468 0,
469 lsp::PublishDiagnosticsParams {
470 uri: Url::from_file_path("/dir/a.rs").unwrap(),
471 version: None,
472 diagnostics: vec![lsp::Diagnostic {
473 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
474 severity: Some(lsp::DiagnosticSeverity::ERROR),
475 message: "error 1".to_string(),
476 ..Default::default()
477 }],
478 },
479 &[],
480 cx,
481 )
482 .unwrap();
483 project
484 .update_diagnostics(
485 0,
486 lsp::PublishDiagnosticsParams {
487 uri: Url::from_file_path("/dir/b.rs").unwrap(),
488 version: None,
489 diagnostics: vec![lsp::Diagnostic {
490 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
491 severity: Some(lsp::DiagnosticSeverity::WARNING),
492 message: "error 2".to_string(),
493 ..Default::default()
494 }],
495 },
496 &[],
497 cx,
498 )
499 .unwrap();
500 });
501
502 buffer_a.read_with(cx, |buffer, _| {
503 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
504 assert_eq!(
505 chunks
506 .iter()
507 .map(|(s, d)| (s.as_str(), *d))
508 .collect::<Vec<_>>(),
509 &[
510 ("let ", None),
511 ("a", Some(DiagnosticSeverity::ERROR)),
512 (" = 1;", None),
513 ]
514 );
515 });
516 buffer_b.read_with(cx, |buffer, _| {
517 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
518 assert_eq!(
519 chunks
520 .iter()
521 .map(|(s, d)| (s.as_str(), *d))
522 .collect::<Vec<_>>(),
523 &[
524 ("let ", None),
525 ("b", Some(DiagnosticSeverity::WARNING)),
526 (" = 2;", None),
527 ]
528 );
529 });
530}
531
532#[gpui::test]
533async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
534 cx.foreground().forbid_parking();
535
536 let fs = FakeFs::new(cx.background());
537 fs.insert_tree(
538 "/root",
539 json!({
540 "dir": {
541 "a.rs": "let a = 1;",
542 },
543 "other.rs": "let b = c;"
544 }),
545 )
546 .await;
547
548 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
549
550 let (worktree, _) = project
551 .update(cx, |project, cx| {
552 project.find_or_create_local_worktree("/root/other.rs", false, cx)
553 })
554 .await
555 .unwrap();
556 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
557
558 project.update(cx, |project, cx| {
559 project
560 .update_diagnostics(
561 0,
562 lsp::PublishDiagnosticsParams {
563 uri: Url::from_file_path("/root/other.rs").unwrap(),
564 version: None,
565 diagnostics: vec![lsp::Diagnostic {
566 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
567 severity: Some(lsp::DiagnosticSeverity::ERROR),
568 message: "unknown variable 'c'".to_string(),
569 ..Default::default()
570 }],
571 },
572 &[],
573 cx,
574 )
575 .unwrap();
576 });
577
578 let buffer = project
579 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
580 .await
581 .unwrap();
582 buffer.read_with(cx, |buffer, _| {
583 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
584 assert_eq!(
585 chunks
586 .iter()
587 .map(|(s, d)| (s.as_str(), *d))
588 .collect::<Vec<_>>(),
589 &[
590 ("let b = ", None),
591 ("c", Some(DiagnosticSeverity::ERROR)),
592 (";", None),
593 ]
594 );
595 });
596
597 project.read_with(cx, |project, cx| {
598 assert_eq!(project.diagnostic_summaries(cx).next(), None);
599 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
600 });
601}
602
// Verifies that a language server's disk-based-diagnostics progress token is
// translated into DiskBasedDiagnosticsStarted/Finished project events, that
// PublishDiagnostics produces DiagnosticsUpdated events, and that publishing
// empty diagnostics twice in a row emits only one update. The assertions rely
// on the exact order of events observed via `subscribe`.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // The fake adapter marks `progress_token` as the disk-based diagnostics
    // token, so start/end progress on it should map to project events.
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    // Starting progress on the disk-based token emits a "started" event.
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    // Publish a diagnostic for a file that has no open buffer yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the disk-based token emits a "finished" event.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    // Opening the buffer afterwards surfaces the previously-published
    // diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second empty publish is a no-op: no further event is emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
728
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in a "diagnostics
// running" state: the replacement server's progress is tracked under its new
// server id, and finishing it clears the running set even though the old
// server's progress never ended.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets id 1 (the original was id 0).
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // `[0; 0]` is an empty array — no servers should remain running.
            [0; 0]
        );
    });
}
806
// Verifies that toggling the per-language `enable_language_server` setting
// stops and restarts exactly the affected server: disabling Rust exits only
// the Rust server; re-enabling Rust while disabling JavaScript starts a fresh
// Rust server and exits the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    deterministic.forbid_parking();

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::EditorSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    // The Rust server receives an Exit notification; the JS server does not.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::EditorSettings {
                    enable_language_server: Some(true),
                    ..Default::default()
                },
            );
            settings.language_overrides.insert(
                Arc::from("JavaScript"),
                settings::EditorSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    // A brand-new Rust server instance starts and re-opens the rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // Meanwhile the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
934
935#[gpui::test]
936async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
937 cx.foreground().forbid_parking();
938
939 let mut language = Language::new(
940 LanguageConfig {
941 name: "Rust".into(),
942 path_suffixes: vec!["rs".to_string()],
943 ..Default::default()
944 },
945 Some(tree_sitter_rust::language()),
946 );
947 let mut fake_servers = language
948 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
949 disk_based_diagnostics_sources: vec!["disk".into()],
950 ..Default::default()
951 }))
952 .await;
953
954 let text = "
955 fn a() { A }
956 fn b() { BB }
957 fn c() { CCC }
958 "
959 .unindent();
960
961 let fs = FakeFs::new(cx.background());
962 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
963
964 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
965 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
966
967 let buffer = project
968 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
969 .await
970 .unwrap();
971
972 let mut fake_server = fake_servers.next().await.unwrap();
973 let open_notification = fake_server
974 .receive_notification::<lsp::notification::DidOpenTextDocument>()
975 .await;
976
977 // Edit the buffer, moving the content down
978 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
979 let change_notification_1 = fake_server
980 .receive_notification::<lsp::notification::DidChangeTextDocument>()
981 .await;
982 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
983
984 // Report some diagnostics for the initial version of the buffer
985 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
986 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
987 version: Some(open_notification.text_document.version),
988 diagnostics: vec![
989 lsp::Diagnostic {
990 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
991 severity: Some(DiagnosticSeverity::ERROR),
992 message: "undefined variable 'A'".to_string(),
993 source: Some("disk".to_string()),
994 ..Default::default()
995 },
996 lsp::Diagnostic {
997 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
998 severity: Some(DiagnosticSeverity::ERROR),
999 message: "undefined variable 'BB'".to_string(),
1000 source: Some("disk".to_string()),
1001 ..Default::default()
1002 },
1003 lsp::Diagnostic {
1004 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1005 severity: Some(DiagnosticSeverity::ERROR),
1006 source: Some("disk".to_string()),
1007 message: "undefined variable 'CCC'".to_string(),
1008 ..Default::default()
1009 },
1010 ],
1011 });
1012
1013 // The diagnostics have moved down since they were created.
1014 buffer.next_notification(cx).await;
1015 buffer.read_with(cx, |buffer, _| {
1016 assert_eq!(
1017 buffer
1018 .snapshot()
1019 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1020 .collect::<Vec<_>>(),
1021 &[
1022 DiagnosticEntry {
1023 range: Point::new(3, 9)..Point::new(3, 11),
1024 diagnostic: Diagnostic {
1025 severity: DiagnosticSeverity::ERROR,
1026 message: "undefined variable 'BB'".to_string(),
1027 is_disk_based: true,
1028 group_id: 1,
1029 is_primary: true,
1030 ..Default::default()
1031 },
1032 },
1033 DiagnosticEntry {
1034 range: Point::new(4, 9)..Point::new(4, 12),
1035 diagnostic: Diagnostic {
1036 severity: DiagnosticSeverity::ERROR,
1037 message: "undefined variable 'CCC'".to_string(),
1038 is_disk_based: true,
1039 group_id: 2,
1040 is_primary: true,
1041 ..Default::default()
1042 }
1043 }
1044 ]
1045 );
1046 assert_eq!(
1047 chunks_with_diagnostics(buffer, 0..buffer.len()),
1048 [
1049 ("\n\nfn a() { ".to_string(), None),
1050 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1051 (" }\nfn b() { ".to_string(), None),
1052 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1053 (" }\nfn c() { ".to_string(), None),
1054 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1055 (" }\n".to_string(), None),
1056 ]
1057 );
1058 assert_eq!(
1059 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1060 [
1061 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1062 (" }\nfn c() { ".to_string(), None),
1063 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1064 ]
1065 );
1066 });
1067
1068 // Ensure overlapping diagnostics are highlighted correctly.
1069 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1070 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1071 version: Some(open_notification.text_document.version),
1072 diagnostics: vec![
1073 lsp::Diagnostic {
1074 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1075 severity: Some(DiagnosticSeverity::ERROR),
1076 message: "undefined variable 'A'".to_string(),
1077 source: Some("disk".to_string()),
1078 ..Default::default()
1079 },
1080 lsp::Diagnostic {
1081 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1082 severity: Some(DiagnosticSeverity::WARNING),
1083 message: "unreachable statement".to_string(),
1084 source: Some("disk".to_string()),
1085 ..Default::default()
1086 },
1087 ],
1088 });
1089
1090 buffer.next_notification(cx).await;
1091 buffer.read_with(cx, |buffer, _| {
1092 assert_eq!(
1093 buffer
1094 .snapshot()
1095 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1096 .collect::<Vec<_>>(),
1097 &[
1098 DiagnosticEntry {
1099 range: Point::new(2, 9)..Point::new(2, 12),
1100 diagnostic: Diagnostic {
1101 severity: DiagnosticSeverity::WARNING,
1102 message: "unreachable statement".to_string(),
1103 is_disk_based: true,
1104 group_id: 4,
1105 is_primary: true,
1106 ..Default::default()
1107 }
1108 },
1109 DiagnosticEntry {
1110 range: Point::new(2, 9)..Point::new(2, 10),
1111 diagnostic: Diagnostic {
1112 severity: DiagnosticSeverity::ERROR,
1113 message: "undefined variable 'A'".to_string(),
1114 is_disk_based: true,
1115 group_id: 3,
1116 is_primary: true,
1117 ..Default::default()
1118 },
1119 }
1120 ]
1121 );
1122 assert_eq!(
1123 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1124 [
1125 ("fn a() { ".to_string(), None),
1126 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1127 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1128 ("\n".to_string(), None),
1129 ]
1130 );
1131 assert_eq!(
1132 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1133 [
1134 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1135 ("\n".to_string(), None),
1136 ]
1137 );
1138 });
1139
1140 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1141 // changes since the last save.
1142 buffer.update(cx, |buffer, cx| {
1143 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1144 buffer.edit(
1145 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1146 None,
1147 cx,
1148 );
1149 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1150 });
1151 let change_notification_2 = fake_server
1152 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1153 .await;
1154 assert!(
1155 change_notification_2.text_document.version > change_notification_1.text_document.version
1156 );
1157
1158 // Handle out-of-order diagnostics
1159 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1160 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1161 version: Some(change_notification_2.text_document.version),
1162 diagnostics: vec![
1163 lsp::Diagnostic {
1164 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1165 severity: Some(DiagnosticSeverity::ERROR),
1166 message: "undefined variable 'BB'".to_string(),
1167 source: Some("disk".to_string()),
1168 ..Default::default()
1169 },
1170 lsp::Diagnostic {
1171 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1172 severity: Some(DiagnosticSeverity::WARNING),
1173 message: "undefined variable 'A'".to_string(),
1174 source: Some("disk".to_string()),
1175 ..Default::default()
1176 },
1177 ],
1178 });
1179
1180 buffer.next_notification(cx).await;
1181 buffer.read_with(cx, |buffer, _| {
1182 assert_eq!(
1183 buffer
1184 .snapshot()
1185 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1186 .collect::<Vec<_>>(),
1187 &[
1188 DiagnosticEntry {
1189 range: Point::new(2, 21)..Point::new(2, 22),
1190 diagnostic: Diagnostic {
1191 severity: DiagnosticSeverity::WARNING,
1192 message: "undefined variable 'A'".to_string(),
1193 is_disk_based: true,
1194 group_id: 6,
1195 is_primary: true,
1196 ..Default::default()
1197 }
1198 },
1199 DiagnosticEntry {
1200 range: Point::new(3, 9)..Point::new(3, 14),
1201 diagnostic: Diagnostic {
1202 severity: DiagnosticSeverity::ERROR,
1203 message: "undefined variable 'BB'".to_string(),
1204 is_disk_based: true,
1205 group_id: 5,
1206 is_primary: true,
1207 ..Default::default()
1208 },
1209 }
1210 ]
1211 );
1212 });
1213}
1214
1215#[gpui::test]
1216async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1217 cx.foreground().forbid_parking();
1218
1219 let text = concat!(
1220 "let one = ;\n", //
1221 "let two = \n",
1222 "let three = 3;\n",
1223 );
1224
1225 let fs = FakeFs::new(cx.background());
1226 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1227
1228 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1229 let buffer = project
1230 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1231 .await
1232 .unwrap();
1233
1234 project.update(cx, |project, cx| {
1235 project
1236 .update_buffer_diagnostics(
1237 &buffer,
1238 vec![
1239 DiagnosticEntry {
1240 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
1241 diagnostic: Diagnostic {
1242 severity: DiagnosticSeverity::ERROR,
1243 message: "syntax error 1".to_string(),
1244 ..Default::default()
1245 },
1246 },
1247 DiagnosticEntry {
1248 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
1249 diagnostic: Diagnostic {
1250 severity: DiagnosticSeverity::ERROR,
1251 message: "syntax error 2".to_string(),
1252 ..Default::default()
1253 },
1254 },
1255 ],
1256 None,
1257 cx,
1258 )
1259 .unwrap();
1260 });
1261
1262 // An empty range is extended forward to include the following character.
1263 // At the end of a line, an empty range is extended backward to include
1264 // the preceding character.
1265 buffer.read_with(cx, |buffer, _| {
1266 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1267 assert_eq!(
1268 chunks
1269 .iter()
1270 .map(|(s, d)| (s.as_str(), *d))
1271 .collect::<Vec<_>>(),
1272 &[
1273 ("let one = ", None),
1274 (";", Some(DiagnosticSeverity::ERROR)),
1275 ("\nlet two =", None),
1276 (" ", Some(DiagnosticSeverity::ERROR)),
1277 ("\nlet three = 3;\n", None)
1278 ]
1279 );
1280 });
1281}
1282
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP edits computed against an *older* document version are
    // translated through the local edits made since that version, so they land
    // in the right places in the current buffer.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the server's edits below will be interpreted relative to this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The ranges below are expressed in coordinates of the *old* document
    // version; `edits_from_lsp` must shift them past the edits made above.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the local comments while
    // incorporating the server's changes (f10, f200, f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
1439
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a large, mostly-redundant LSP diff (rewriting most of the
    // file to express a small change) is minimized into just the edits that
    // actually differ from the current buffer contents.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The redundant delete-and-reinsert collapses into two minimal edits:
        // the import rewrite and the removal of the duplicated `use` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
1546
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    // Verifies that malformed server edits — unordered, with an inverted range
    // and a range extending past the end of the file — are still resolved to
    // the same minimal, well-formed set of buffer edits.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end position precedes the start position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends beyond the end of the file (row 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed diff in the previous test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
1649
1650fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1651 buffer: &Buffer,
1652 range: Range<T>,
1653) -> Vec<(String, Option<DiagnosticSeverity>)> {
1654 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1655 for chunk in buffer.snapshot().chunks(range, true) {
1656 if chunks.last().map_or(false, |prev_chunk| {
1657 prev_chunk.1 == chunk.diagnostic_severity
1658 }) {
1659 chunks.last_mut().unwrap().0.push_str(chunk.text);
1660 } else {
1661 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1662 }
1663 }
1664 chunks
1665}
1666
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Go-to-definition targeting a file outside the project should open that
    // file in a temporary, invisible worktree that goes away when the
    // definition result is dropped.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` lives outside of it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the definition request to a location in `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was opened in a second worktree that is not visible.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for `a.rs`.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
1765
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When a completion item carries no explicit text edit, the editor must
    // infer the range to replace from the text around the cursor.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of `b.fqn`.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server's item has an `insert_text` but no edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the `fqn` token preceding the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Request completions just before the closing quote of a string literal.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    // This item has neither an insert text nor an edit range; the label is used.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers `cmp` but not the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
1849
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Completion insert text containing `\r` and `\r\n` line endings must be
    // normalized to `\n` before it is offered to the buffer.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server responds with insert text containing `\r` and `\r\n`.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles were converted to plain `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
1902
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // A code action whose resolution yields no edits must be applied by
    // executing its command; the edits then arrive via the server's
    // `workspace/applyEdit` request and end up in the returned transaction.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one that carries the command).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2018
2019#[gpui::test]
2020async fn test_save_file(cx: &mut gpui::TestAppContext) {
2021 let fs = FakeFs::new(cx.background());
2022 fs.insert_tree(
2023 "/dir",
2024 json!({
2025 "file1": "the old contents",
2026 }),
2027 )
2028 .await;
2029
2030 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2031 let buffer = project
2032 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2033 .await
2034 .unwrap();
2035 buffer
2036 .update(cx, |buffer, cx| {
2037 assert_eq!(buffer.text(), "the old contents");
2038 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2039 buffer.save(cx)
2040 })
2041 .await
2042 .unwrap();
2043
2044 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2045 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2046}
2047
2048#[gpui::test]
2049async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2050 let fs = FakeFs::new(cx.background());
2051 fs.insert_tree(
2052 "/dir",
2053 json!({
2054 "file1": "the old contents",
2055 }),
2056 )
2057 .await;
2058
2059 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2060 let buffer = project
2061 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2062 .await
2063 .unwrap();
2064 buffer
2065 .update(cx, |buffer, cx| {
2066 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2067 buffer.save(cx)
2068 })
2069 .await
2070 .unwrap();
2071
2072 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2073 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2074}
2075
2076#[gpui::test]
2077async fn test_save_as(cx: &mut gpui::TestAppContext) {
2078 let fs = FakeFs::new(cx.background());
2079 fs.insert_tree("/dir", json!({})).await;
2080
2081 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2082 let buffer = project.update(cx, |project, cx| {
2083 project.create_buffer("", None, cx).unwrap()
2084 });
2085 buffer.update(cx, |buffer, cx| {
2086 buffer.edit([(0..0, "abc")], None, cx);
2087 assert!(buffer.is_dirty());
2088 assert!(!buffer.has_conflict());
2089 });
2090 project
2091 .update(cx, |project, cx| {
2092 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2093 })
2094 .await
2095 .unwrap();
2096 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2097 buffer.read_with(cx, |buffer, cx| {
2098 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2099 assert!(!buffer.is_dirty());
2100 assert!(!buffer.has_conflict());
2101 });
2102
2103 let opened_buffer = project
2104 .update(cx, |project, cx| {
2105 project.open_local_buffer("/dir/file1", cx)
2106 })
2107 .await
2108 .unwrap();
2109 assert_eq!(opened_buffer, buffer);
2110}
2111
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    // Renames and deletions on disk must preserve entry ids, keep open buffers
    // pointing at their moved files, and be replicable to a remote worktree
    // via snapshot updates. Uses the real filesystem (hence `retries = 5`).
    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.read_with(cx, |p, _| p.client.clone());

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a worktree path.
    let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    let remote = cx.update(|cx| {
        Worktree::remote(
            1,
            1,
            proto::WorktreeMetadata {
                id: initial_snapshot.id().to_proto(),
                root_name: initial_snapshot.root_name().into(),
                abs_path: initial_snapshot.abs_path().as_os_str().as_bytes().to_vec(),
                visible: true,
            },
            rpc.clone(),
            cx,
        )
    });
    remote.update(cx, |remote, _| {
        let update = initial_snapshot.build_initial_update(1);
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();

    cx.read(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The worktree after the moves above: `b/c` became `d`, `file5` is gone.
    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.read(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );

        // Entry ids survive renames, even across directories.
        assert_eq!(id_for_path("a/file2.new", cx), file2_id);
        assert_eq!(id_for_path("d/file3", cx), file3_id);
        assert_eq!(id_for_path("d/file4", cx), file4_id);

        // Open buffers track their files' new paths.
        assert_eq!(
            buffer2.read(app).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // The deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer5.read(app).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(app).file().unwrap().is_deleted());
        assert!(!buffer3.read(app).file().unwrap().is_deleted());
        assert!(!buffer4.read(app).file().unwrap().is_deleted());
        assert!(buffer5.read(app).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    remote.update(cx, |remote, cx| {
        let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
            &initial_snapshot,
            1,
            1,
            true,
        );
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();
    remote.read_with(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2263
2264#[gpui::test(iterations = 10)]
2265async fn test_buffer_identity_across_renames(
2266 deterministic: Arc<Deterministic>,
2267 cx: &mut gpui::TestAppContext,
2268) {
2269 let fs = FakeFs::new(cx.background());
2270 fs.insert_tree(
2271 "/dir",
2272 json!({
2273 "a": {
2274 "file1": "",
2275 }
2276 }),
2277 )
2278 .await;
2279
2280 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2281 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2282 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2283
2284 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2285 project.read_with(cx, |project, cx| {
2286 let tree = project.worktrees(cx).next().unwrap();
2287 tree.read(cx)
2288 .entry_for_path(path)
2289 .unwrap_or_else(|| panic!("no entry for path {}", path))
2290 .id
2291 })
2292 };
2293
2294 let dir_id = id_for_path("a", cx);
2295 let file_id = id_for_path("a/file1", cx);
2296 let buffer = project
2297 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2298 .await
2299 .unwrap();
2300 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2301
2302 project
2303 .update(cx, |project, cx| {
2304 project.rename_entry(dir_id, Path::new("b"), cx)
2305 })
2306 .unwrap()
2307 .await
2308 .unwrap();
2309 deterministic.run_until_parked();
2310 assert_eq!(id_for_path("b", cx), dir_id);
2311 assert_eq!(id_for_path("b/file1", cx), file_id);
2312 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2313}
2314
2315#[gpui::test]
2316async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2317 let fs = FakeFs::new(cx.background());
2318 fs.insert_tree(
2319 "/dir",
2320 json!({
2321 "a.txt": "a-contents",
2322 "b.txt": "b-contents",
2323 }),
2324 )
2325 .await;
2326
2327 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2328
2329 // Spawn multiple tasks to open paths, repeating some paths.
2330 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2331 (
2332 p.open_local_buffer("/dir/a.txt", cx),
2333 p.open_local_buffer("/dir/b.txt", cx),
2334 p.open_local_buffer("/dir/a.txt", cx),
2335 )
2336 });
2337
2338 let buffer_a_1 = buffer_a_1.await.unwrap();
2339 let buffer_a_2 = buffer_a_2.await.unwrap();
2340 let buffer_b = buffer_b.await.unwrap();
2341 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2342 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2343
2344 // There is only one buffer per path.
2345 let buffer_a_id = buffer_a_1.id();
2346 assert_eq!(buffer_a_2.id(), buffer_a_id);
2347
2348 // Open the same path again while it is still open.
2349 drop(buffer_a_1);
2350 let buffer_a_3 = project
2351 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2352 .await
2353 .unwrap();
2354
2355 // There's still only one buffer per path.
2356 assert_eq!(buffer_a_3.id(), buffer_a_id);
2357}
2358
2359#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises a buffer's dirty flag and the events it emits in response to
    // edits, saves, reverting to saved text, and on-disk file deletion.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collected (non-operation) events, shared with the subscription closure.
    let events = Rc::new(RefCell::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every event except Operation, which fires on each edit and
        // would drown out the state-change events under test.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.borrow_mut().clear();
        // Simulate a save by acknowledging the buffer's current version,
        // fingerprint, and mtime as the on-disk state.
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            None,
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits flips the dirty
        // bit, so DirtyChanged appears once between the two Edited events.
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.borrow(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    // Let the fs event propagate to the worktree and buffer.
    cx.foreground().run_until_parked();
    buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.borrow(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer before deleting the file on disk.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.borrow_mut().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}
2503
2504#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Checks how a buffer reacts to its file changing on disk: a clean buffer
    // reloads (preserving anchors via a diff), while a dirty buffer keeps its
    // text and is flagged as conflicted.
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Anchor at column 1 of each of the three initial lines, so we can verify
    // they track their lines across the reload below.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.foreground().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors on "aaa" and "bbbbb" followed those lines to rows 1 and
        // 3; the anchor on the deleted "c" line collapsed to the end of the
        // preceding kept line.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
2582
2583#[gpui::test]
2584async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2585 let fs = FakeFs::new(cx.background());
2586 fs.insert_tree(
2587 "/dir",
2588 json!({
2589 "file1": "a\nb\nc\n",
2590 "file2": "one\r\ntwo\r\nthree\r\n",
2591 }),
2592 )
2593 .await;
2594
2595 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2596 let buffer1 = project
2597 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2598 .await
2599 .unwrap();
2600 let buffer2 = project
2601 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2602 .await
2603 .unwrap();
2604
2605 buffer1.read_with(cx, |buffer, _| {
2606 assert_eq!(buffer.text(), "a\nb\nc\n");
2607 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2608 });
2609 buffer2.read_with(cx, |buffer, _| {
2610 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2611 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2612 });
2613
2614 // Change a file's line endings on disk from unix to windows. The buffer's
2615 // state updates correctly.
2616 fs.save(
2617 "/dir/file1".as_ref(),
2618 &"aaa\nb\nc\n".into(),
2619 LineEnding::Windows,
2620 )
2621 .await
2622 .unwrap();
2623 cx.foreground().run_until_parked();
2624 buffer1.read_with(cx, |buffer, _| {
2625 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2626 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2627 });
2628
2629 // Save a file with windows line endings. The file is written correctly.
2630 buffer2
2631 .update(cx, |buffer, cx| {
2632 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2633 buffer.save(cx)
2634 })
2635 .await
2636 .unwrap();
2637 assert_eq!(
2638 fs.load("/dir/file2".as_ref()).await.unwrap(),
2639 "one\r\ntwo\r\nthree\r\nfour\r\n",
2640 );
2641}
2642
2643#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics linked via related_information are merged
    // into groups: each group has one primary diagnostic plus its hints, all
    // sharing a group_id.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two logical errors, each
    // accompanied by separate HINT diagnostics that point back at the primary
    // via related_information (mirroring how rust-analyzer reports them).
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group 0: "error 1", with one related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Standalone hint for "error 1"; its related_information refers
            // back to the original diagnostic.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group 1: "error 2", with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // Standalone hints for "error 2", each pointing back at it.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // All five entries survive, ordered by position, with group 0 containing
    // "error 1" + its hint and group 1 containing "error 2" + its two hints.
    // Exactly one entry per group is the primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Querying each group individually returns only that group's entries.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}
2881
2882#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end test of symbol rename: prepare_rename resolves the editable
    // range via the language server, and perform_rename applies a multi-file
    // WorkspaceEdit, returning the affected buffers in a transaction.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Advertise rename support (with prepareProvider) so the project routes
    // rename requests to the fake server.
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    // Opening a Rust buffer starts the fake language server.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE"); the fake server answers with
    // the symbol's range, which the project converts to buffer offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename to "THREE"; the fake server returns a WorkspaceEdit
    // touching both one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its applied changes; both
    // files were edited, including two.rs which was opened by the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .read_with(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .read_with(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3019
3020#[gpui::test]
3021async fn test_search(cx: &mut gpui::TestAppContext) {
3022 let fs = FakeFs::new(cx.background());
3023 fs.insert_tree(
3024 "/dir",
3025 json!({
3026 "one.rs": "const ONE: usize = 1;",
3027 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3028 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3029 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3030 }),
3031 )
3032 .await;
3033 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3034 assert_eq!(
3035 search(&project, SearchQuery::text("TWO", false, true), cx)
3036 .await
3037 .unwrap(),
3038 HashMap::from_iter([
3039 ("two.rs".to_string(), vec![6..9]),
3040 ("three.rs".to_string(), vec![37..40])
3041 ])
3042 );
3043
3044 let buffer_4 = project
3045 .update(cx, |project, cx| {
3046 project.open_local_buffer("/dir/four.rs", cx)
3047 })
3048 .await
3049 .unwrap();
3050 buffer_4.update(cx, |buffer, cx| {
3051 let text = "two::TWO";
3052 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3053 });
3054
3055 assert_eq!(
3056 search(&project, SearchQuery::text("TWO", false, true), cx)
3057 .await
3058 .unwrap(),
3059 HashMap::from_iter([
3060 ("two.rs".to_string(), vec![6..9]),
3061 ("three.rs".to_string(), vec![37..40]),
3062 ("four.rs".to_string(), vec![25..28, 36..39])
3063 ])
3064 );
3065
3066 async fn search(
3067 project: &ModelHandle<Project>,
3068 query: SearchQuery,
3069 cx: &mut gpui::TestAppContext,
3070 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3071 let results = project
3072 .update(cx, |project, cx| project.search(query, cx))
3073 .await?;
3074
3075 Ok(results
3076 .into_iter()
3077 .map(|(buffer, ranges)| {
3078 buffer.read_with(cx, |buffer, _| {
3079 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3080 let ranges = ranges
3081 .into_iter()
3082 .map(|range| range.to_offset(buffer))
3083 .collect::<Vec<_>>();
3084 (path, ranges)
3085 })
3086 })
3087 .collect())
3088 }
3089}