1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::RealFs;
3use futures::{future, StreamExt};
4use gpui::{executor::Deterministic, test::subscribe};
5use language::{
6 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
7 LineEnding, OffsetRangeExt, Point, ToPoint,
8};
9use lsp::Url;
10use serde_json::json;
11use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
12use unindent::Unindent as _;
13use util::{assert_set_eq, test::temp_tree};
14
15#[gpui::test]
16async fn test_symlinks(cx: &mut gpui::TestAppContext) {
17 let dir = temp_tree(json!({
18 "root": {
19 "apple": "",
20 "banana": {
21 "carrot": {
22 "date": "",
23 "endive": "",
24 }
25 },
26 "fennel": {
27 "grape": "",
28 }
29 }
30 }));
31
32 let root_link_path = dir.path().join("root_link");
33 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
34 unix::fs::symlink(
35 &dir.path().join("root/fennel"),
36 &dir.path().join("root/finnochio"),
37 )
38 .unwrap();
39
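    // Open the project via the symlinked root. The worktree should follow the
    // symlinks: `finnochio` points at `fennel`, so both paths resolve to the
    // same inode, and the symlinked entries are included in the file count.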
40 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
41 project.read_with(cx, |project, cx| {
42 let tree = project.worktrees(cx).next().unwrap().read(cx);
43 assert_eq!(tree.file_count(), 5);
44 assert_eq!(
45 tree.inode_for_path("fennel/grape"),
46 tree.inode_for_path("finnochio/grape")
47 );
48 });
49}
50
51#[gpui::test]
52async fn test_managing_language_servers(
53 deterministic: Arc<Deterministic>,
54 cx: &mut gpui::TestAppContext,
55) {
56 cx.foreground().forbid_parking();
57
58 let mut rust_language = Language::new(
59 LanguageConfig {
60 name: "Rust".into(),
61 path_suffixes: vec!["rs".to_string()],
62 ..Default::default()
63 },
64 Some(tree_sitter_rust::language()),
65 );
66 let mut json_language = Language::new(
67 LanguageConfig {
68 name: "JSON".into(),
69 path_suffixes: vec!["json".to_string()],
70 ..Default::default()
71 },
72 None,
73 );
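    // Register fake language servers for both languages so the test can observe
    // the requests and notifications the project sends to them.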
74 let mut fake_rust_servers = rust_language
75 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
76 name: "the-rust-language-server",
77 capabilities: lsp::ServerCapabilities {
78 completion_provider: Some(lsp::CompletionOptions {
79 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
80 ..Default::default()
81 }),
82 ..Default::default()
83 },
84 ..Default::default()
85 }))
86 .await;
87 let mut fake_json_servers = json_language
88 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
89 name: "the-json-language-server",
90 capabilities: lsp::ServerCapabilities {
91 completion_provider: Some(lsp::CompletionOptions {
92 trigger_characters: Some(vec![":".to_string()]),
93 ..Default::default()
94 }),
95 ..Default::default()
96 },
97 ..Default::default()
98 }))
99 .await;
100
101 let fs = FakeFs::new(cx.background());
102 fs.insert_tree(
103 "/the-root",
104 json!({
105 "test.rs": "const A: i32 = 1;",
106 "test2.rs": "",
107 "Cargo.toml": "a = 1",
108 "package.json": "{\"a\": 1}",
109 }),
110 )
111 .await;
112
113 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
114
115 // Open a buffer without an associated language server.
116 let toml_buffer = project
117 .update(cx, |project, cx| {
118 project.open_local_buffer("/the-root/Cargo.toml", cx)
119 })
120 .await
121 .unwrap();
122
123 // Open a buffer with an associated language server before the language for it has been loaded.
124 let rust_buffer = project
125 .update(cx, |project, cx| {
126 project.open_local_buffer("/the-root/test.rs", cx)
127 })
128 .await
129 .unwrap();
130 rust_buffer.read_with(cx, |buffer, _| {
131 assert_eq!(buffer.language().map(|l| l.name()), None);
132 });
133
134 // Now we add the languages to the project, and ensure they get assigned to all
135 // the relevant open buffers.
136 project.update(cx, |project, _| {
137 project.languages.add(Arc::new(json_language));
138 project.languages.add(Arc::new(rust_language));
139 });
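    // Let the background work finish so the newly added languages are assigned
    // to the already-open buffers.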
140 deterministic.run_until_parked();
141 rust_buffer.read_with(cx, |buffer, _| {
142 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
143 });
144
145 // A server is started up, and it is notified about Rust files.
146 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
147 assert_eq!(
148 fake_rust_server
149 .receive_notification::<lsp::notification::DidOpenTextDocument>()
150 .await
151 .text_document,
152 lsp::TextDocumentItem {
153 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
154 version: 0,
155 text: "const A: i32 = 1;".to_string(),
156 language_id: Default::default()
157 }
158 );
159
160 // The buffer is configured based on the language server's capabilities.
161 rust_buffer.read_with(cx, |buffer, _| {
162 assert_eq!(
163 buffer.completion_triggers(),
164 &[".".to_string(), "::".to_string()]
165 );
166 });
167 toml_buffer.read_with(cx, |buffer, _| {
168 assert!(buffer.completion_triggers().is_empty());
169 });
170
171 // Edit a buffer. The changes are reported to the language server.
172 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
173 assert_eq!(
174 fake_rust_server
175 .receive_notification::<lsp::notification::DidChangeTextDocument>()
176 .await
177 .text_document,
178 lsp::VersionedTextDocumentIdentifier::new(
179 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
180 1
181 )
182 );
183
184 // Open a third buffer with a different associated language server.
185 let json_buffer = project
186 .update(cx, |project, cx| {
187 project.open_local_buffer("/the-root/package.json", cx)
188 })
189 .await
190 .unwrap();
191
    // A JSON language server is started up and is only notified about the JSON buffer.
193 let mut fake_json_server = fake_json_servers.next().await.unwrap();
194 assert_eq!(
195 fake_json_server
196 .receive_notification::<lsp::notification::DidOpenTextDocument>()
197 .await
198 .text_document,
199 lsp::TextDocumentItem {
200 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
201 version: 0,
202 text: "{\"a\": 1}".to_string(),
203 language_id: Default::default()
204 }
205 );
206
207 // This buffer is configured based on the second language server's
208 // capabilities.
209 json_buffer.read_with(cx, |buffer, _| {
210 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
211 });
212
213 // When opening another buffer whose language server is already running,
214 // it is also configured based on the existing language server's capabilities.
215 let rust_buffer2 = project
216 .update(cx, |project, cx| {
217 project.open_local_buffer("/the-root/test2.rs", cx)
218 })
219 .await
220 .unwrap();
221 rust_buffer2.read_with(cx, |buffer, _| {
222 assert_eq!(
223 buffer.completion_triggers(),
224 &[".".to_string(), "::".to_string()]
225 );
226 });
227
228 // Changes are reported only to servers matching the buffer's language.
229 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
230 rust_buffer2.update(cx, |buffer, cx| {
231 buffer.edit([(0..0, "let x = 1;")], None, cx)
232 });
233 assert_eq!(
234 fake_rust_server
235 .receive_notification::<lsp::notification::DidChangeTextDocument>()
236 .await
237 .text_document,
238 lsp::VersionedTextDocumentIdentifier::new(
239 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
240 1
241 )
242 );
243
244 // Save notifications are reported to all servers.
245 toml_buffer
246 .update(cx, |buffer, cx| buffer.save(cx))
247 .await
248 .unwrap();
249 assert_eq!(
250 fake_rust_server
251 .receive_notification::<lsp::notification::DidSaveTextDocument>()
252 .await
253 .text_document,
254 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
255 );
256 assert_eq!(
257 fake_json_server
258 .receive_notification::<lsp::notification::DidSaveTextDocument>()
259 .await
260 .text_document,
261 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
262 );
263
264 // Renames are reported only to servers matching the buffer's language.
265 fs.rename(
266 Path::new("/the-root/test2.rs"),
267 Path::new("/the-root/test3.rs"),
268 Default::default(),
269 )
270 .await
271 .unwrap();
272 assert_eq!(
273 fake_rust_server
274 .receive_notification::<lsp::notification::DidCloseTextDocument>()
275 .await
276 .text_document,
277 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
278 );
279 assert_eq!(
280 fake_rust_server
281 .receive_notification::<lsp::notification::DidOpenTextDocument>()
282 .await
283 .text_document,
284 lsp::TextDocumentItem {
285 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
286 version: 0,
287 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
288 language_id: Default::default()
289 },
290 );
291
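    // Attach a diagnostic to the buffer so we can verify below that diagnostics
    // are cleared when the file's language changes.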
292 rust_buffer2.update(cx, |buffer, cx| {
293 buffer.update_diagnostics(
294 DiagnosticSet::from_sorted_entries(
295 vec![DiagnosticEntry {
296 diagnostic: Default::default(),
297 range: Anchor::MIN..Anchor::MAX,
298 }],
299 &buffer.snapshot(),
300 ),
301 cx,
302 );
303 assert_eq!(
304 buffer
305 .snapshot()
306 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
307 .count(),
308 1
309 );
310 });
311
312 // When the rename changes the extension of the file, the buffer gets closed on the old
313 // language server and gets opened on the new one.
314 fs.rename(
315 Path::new("/the-root/test3.rs"),
316 Path::new("/the-root/test3.json"),
317 Default::default(),
318 )
319 .await
320 .unwrap();
321 assert_eq!(
322 fake_rust_server
323 .receive_notification::<lsp::notification::DidCloseTextDocument>()
324 .await
325 .text_document,
326 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
327 );
328 assert_eq!(
329 fake_json_server
330 .receive_notification::<lsp::notification::DidOpenTextDocument>()
331 .await
332 .text_document,
333 lsp::TextDocumentItem {
334 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
335 version: 0,
336 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
337 language_id: Default::default()
338 },
339 );
340
    // The diagnostics are cleared, since the buffer's language has changed.
342 rust_buffer2.read_with(cx, |buffer, _| {
343 assert_eq!(
344 buffer
345 .snapshot()
346 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
347 .count(),
348 0
349 );
350 });
351
    // The renamed file's version resets after changing language servers.
353 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
354 assert_eq!(
355 fake_json_server
356 .receive_notification::<lsp::notification::DidChangeTextDocument>()
357 .await
358 .text_document,
359 lsp::VersionedTextDocumentIdentifier::new(
360 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
361 1
362 )
363 );
364
    // Restart the language servers.
366 project.update(cx, |project, cx| {
367 project.restart_language_servers_for_buffers(
368 vec![rust_buffer.clone(), json_buffer.clone()],
369 cx,
370 );
371 });
372
373 let mut rust_shutdown_requests = fake_rust_server
374 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
375 let mut json_shutdown_requests = fake_json_server
376 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
377 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
378
379 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
380 let mut fake_json_server = fake_json_servers.next().await.unwrap();
381
    // Ensure the Rust document is reopened in the new Rust language server.
383 assert_eq!(
384 fake_rust_server
385 .receive_notification::<lsp::notification::DidOpenTextDocument>()
386 .await
387 .text_document,
388 lsp::TextDocumentItem {
389 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
390 version: 1,
391 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
392 language_id: Default::default()
393 }
394 );
395
    // Ensure the JSON documents are reopened in the new JSON language server.
397 assert_set_eq!(
398 [
399 fake_json_server
400 .receive_notification::<lsp::notification::DidOpenTextDocument>()
401 .await
402 .text_document,
403 fake_json_server
404 .receive_notification::<lsp::notification::DidOpenTextDocument>()
405 .await
406 .text_document,
407 ],
408 [
409 lsp::TextDocumentItem {
410 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
411 version: 0,
412 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
413 language_id: Default::default()
414 },
415 lsp::TextDocumentItem {
416 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
417 version: 1,
418 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
419 language_id: Default::default()
420 }
421 ]
422 );
423
424 // Close notifications are reported only to servers matching the buffer's language.
425 cx.update(|_| drop(json_buffer));
426 let close_message = lsp::DidCloseTextDocumentParams {
427 text_document: lsp::TextDocumentIdentifier::new(
428 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
429 ),
430 };
431 assert_eq!(
432 fake_json_server
433 .receive_notification::<lsp::notification::DidCloseTextDocument>()
434 .await,
435 close_message,
436 );
437}
438
439#[gpui::test]
440async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
441 cx.foreground().forbid_parking();
442
443 let fs = FakeFs::new(cx.background());
444 fs.insert_tree(
445 "/dir",
446 json!({
447 "a.rs": "let a = 1;",
448 "b.rs": "let b = 2;"
449 }),
450 )
451 .await;
452
453 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
454
455 let buffer_a = project
456 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
457 .await
458 .unwrap();
459 let buffer_b = project
460 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
461 .await
462 .unwrap();
463
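    // Publish an error for `a.rs` and a warning for `b.rs`. Each single-file
    // worktree should receive only the diagnostics for its own file.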
464 project.update(cx, |project, cx| {
465 project
466 .update_diagnostics(
467 0,
468 lsp::PublishDiagnosticsParams {
469 uri: Url::from_file_path("/dir/a.rs").unwrap(),
470 version: None,
471 diagnostics: vec![lsp::Diagnostic {
472 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
473 severity: Some(lsp::DiagnosticSeverity::ERROR),
474 message: "error 1".to_string(),
475 ..Default::default()
476 }],
477 },
478 &[],
479 cx,
480 )
481 .unwrap();
482 project
483 .update_diagnostics(
484 0,
485 lsp::PublishDiagnosticsParams {
486 uri: Url::from_file_path("/dir/b.rs").unwrap(),
487 version: None,
488 diagnostics: vec![lsp::Diagnostic {
489 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
490 severity: Some(lsp::DiagnosticSeverity::WARNING),
491 message: "error 2".to_string(),
492 ..Default::default()
493 }],
494 },
495 &[],
496 cx,
497 )
498 .unwrap();
499 });
500
501 buffer_a.read_with(cx, |buffer, _| {
502 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
503 assert_eq!(
504 chunks
505 .iter()
506 .map(|(s, d)| (s.as_str(), *d))
507 .collect::<Vec<_>>(),
508 &[
509 ("let ", None),
510 ("a", Some(DiagnosticSeverity::ERROR)),
511 (" = 1;", None),
512 ]
513 );
514 });
515 buffer_b.read_with(cx, |buffer, _| {
516 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
517 assert_eq!(
518 chunks
519 .iter()
520 .map(|(s, d)| (s.as_str(), *d))
521 .collect::<Vec<_>>(),
522 &[
523 ("let ", None),
524 ("b", Some(DiagnosticSeverity::WARNING)),
525 (" = 2;", None),
526 ]
527 );
528 });
529}
530
531#[gpui::test]
532async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
533 cx.foreground().forbid_parking();
534
535 let fs = FakeFs::new(cx.background());
536 fs.insert_tree(
537 "/root",
538 json!({
539 "dir": {
540 "a.rs": "let a = 1;",
541 },
542 "other.rs": "let b = c;"
543 }),
544 )
545 .await;
546
547 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
548
549 let (worktree, _) = project
550 .update(cx, |project, cx| {
551 project.find_or_create_local_worktree("/root/other.rs", false, cx)
552 })
553 .await
554 .unwrap();
555 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
556
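    // Publish a diagnostic for the file in the non-visible worktree. The
    // diagnostic should appear in the buffer, but it should be excluded from
    // the project-wide diagnostic summaries checked at the end of this test.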
557 project.update(cx, |project, cx| {
558 project
559 .update_diagnostics(
560 0,
561 lsp::PublishDiagnosticsParams {
562 uri: Url::from_file_path("/root/other.rs").unwrap(),
563 version: None,
564 diagnostics: vec![lsp::Diagnostic {
565 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
566 severity: Some(lsp::DiagnosticSeverity::ERROR),
567 message: "unknown variable 'c'".to_string(),
568 ..Default::default()
569 }],
570 },
571 &[],
572 cx,
573 )
574 .unwrap();
575 });
576
577 let buffer = project
578 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
579 .await
580 .unwrap();
581 buffer.read_with(cx, |buffer, _| {
582 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
583 assert_eq!(
584 chunks
585 .iter()
586 .map(|(s, d)| (s.as_str(), *d))
587 .collect::<Vec<_>>(),
588 &[
589 ("let b = ", None),
590 ("c", Some(DiagnosticSeverity::ERROR)),
591 (";", None),
592 ]
593 );
594 });
595
596 project.read_with(cx, |project, cx| {
597 assert_eq!(project.diagnostic_summaries(cx).next(), None);
598 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
599 });
600}
601
602#[gpui::test]
603async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
604 cx.foreground().forbid_parking();
605
606 let progress_token = "the-progress-token";
607 let mut language = Language::new(
608 LanguageConfig {
609 name: "Rust".into(),
610 path_suffixes: vec!["rs".to_string()],
611 ..Default::default()
612 },
613 Some(tree_sitter_rust::language()),
614 );
615 let mut fake_servers = language
616 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
617 disk_based_diagnostics_progress_token: Some(progress_token.into()),
618 disk_based_diagnostics_sources: vec!["disk".into()],
619 ..Default::default()
620 }))
621 .await;
622
623 let fs = FakeFs::new(cx.background());
624 fs.insert_tree(
625 "/dir",
626 json!({
627 "a.rs": "fn a() { A }",
628 "b.rs": "const y: i32 = 1",
629 }),
630 )
631 .await;
632
633 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
634 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
635 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
636
    // Cause the worktree to start the fake language server.
638 let _buffer = project
639 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
640 .await
641 .unwrap();
642
643 let mut events = subscribe(&project, cx);
644
645 let fake_server = fake_servers.next().await.unwrap();
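    // When the server starts the disk-based diagnostics progress token, the
    // project emits a `DiskBasedDiagnosticsStarted` event.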
646 fake_server.start_progress(progress_token).await;
647 assert_eq!(
648 events.next().await.unwrap(),
649 Event::DiskBasedDiagnosticsStarted {
650 language_server_id: 0,
651 }
652 );
653
654 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
655 uri: Url::from_file_path("/dir/a.rs").unwrap(),
656 version: None,
657 diagnostics: vec![lsp::Diagnostic {
658 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
659 severity: Some(lsp::DiagnosticSeverity::ERROR),
660 message: "undefined variable 'A'".to_string(),
661 ..Default::default()
662 }],
663 });
664 assert_eq!(
665 events.next().await.unwrap(),
666 Event::DiagnosticsUpdated {
667 language_server_id: 0,
668 path: (worktree_id, Path::new("a.rs")).into()
669 }
670 );
671
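    // Ending the progress token emits a `DiskBasedDiagnosticsFinished` event.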
672 fake_server.end_progress(progress_token);
673 assert_eq!(
674 events.next().await.unwrap(),
675 Event::DiskBasedDiagnosticsFinished {
676 language_server_id: 0
677 }
678 );
679
680 let buffer = project
681 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
682 .await
683 .unwrap();
684
685 buffer.read_with(cx, |buffer, _| {
686 let snapshot = buffer.snapshot();
687 let diagnostics = snapshot
688 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
689 .collect::<Vec<_>>();
690 assert_eq!(
691 diagnostics,
692 &[DiagnosticEntry {
693 range: Point::new(0, 9)..Point::new(0, 10),
694 diagnostic: Diagnostic {
695 severity: lsp::DiagnosticSeverity::ERROR,
696 message: "undefined variable 'A'".to_string(),
697 group_id: 0,
698 is_primary: true,
699 ..Default::default()
700 }
701 }]
702 )
703 });
704
705 // Ensure publishing empty diagnostics twice only results in one update event.
706 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
707 uri: Url::from_file_path("/dir/a.rs").unwrap(),
708 version: None,
709 diagnostics: Default::default(),
710 });
711 assert_eq!(
712 events.next().await.unwrap(),
713 Event::DiagnosticsUpdated {
714 language_server_id: 0,
715 path: (worktree_id, Path::new("a.rs")).into()
716 }
717 );
718
719 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
720 uri: Url::from_file_path("/dir/a.rs").unwrap(),
721 version: None,
722 diagnostics: Default::default(),
723 });
724 cx.foreground().run_until_parked();
725 assert_eq!(futures::poll!(events.next()), Poll::Pending);
726}
727
728#[gpui::test]
729async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
730 cx.foreground().forbid_parking();
731
732 let progress_token = "the-progress-token";
733 let mut language = Language::new(
734 LanguageConfig {
735 path_suffixes: vec!["rs".to_string()],
736 ..Default::default()
737 },
738 None,
739 );
740 let mut fake_servers = language
741 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
742 disk_based_diagnostics_sources: vec!["disk".into()],
743 disk_based_diagnostics_progress_token: Some(progress_token.into()),
744 ..Default::default()
745 }))
746 .await;
747
748 let fs = FakeFs::new(cx.background());
749 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
750
751 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
752 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
753
754 let buffer = project
755 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
756 .await
757 .unwrap();
758
759 // Simulate diagnostics starting to update.
760 let fake_server = fake_servers.next().await.unwrap();
761 fake_server.start_progress(progress_token).await;
762
763 // Restart the server before the diagnostics finish updating.
764 project.update(cx, |project, cx| {
765 project.restart_language_servers_for_buffers([buffer], cx);
766 });
767 let mut events = subscribe(&project, cx);
768
769 // Simulate the newly started server sending more diagnostics.
770 let fake_server = fake_servers.next().await.unwrap();
771 fake_server.start_progress(progress_token).await;
772 assert_eq!(
773 events.next().await.unwrap(),
774 Event::DiskBasedDiagnosticsStarted {
775 language_server_id: 1
776 }
777 );
778 project.read_with(cx, |project, _| {
779 assert_eq!(
780 project
781 .language_servers_running_disk_based_diagnostics()
782 .collect::<Vec<_>>(),
783 [1]
784 );
785 });
786
787 // All diagnostics are considered done, despite the old server's diagnostic
788 // task never completing.
789 fake_server.end_progress(progress_token);
790 assert_eq!(
791 events.next().await.unwrap(),
792 Event::DiskBasedDiagnosticsFinished {
793 language_server_id: 1
794 }
795 );
796 project.read_with(cx, |project, _| {
797 assert_eq!(
798 project
799 .language_servers_running_disk_based_diagnostics()
800 .collect::<Vec<_>>(),
801 [0; 0]
802 );
803 });
804}
805
806#[gpui::test]
807async fn test_toggling_enable_language_server(
808 deterministic: Arc<Deterministic>,
809 cx: &mut gpui::TestAppContext,
810) {
811 deterministic.forbid_parking();
812
813 let mut rust = Language::new(
814 LanguageConfig {
815 name: Arc::from("Rust"),
816 path_suffixes: vec!["rs".to_string()],
817 ..Default::default()
818 },
819 None,
820 );
821 let mut fake_rust_servers = rust
822 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
823 name: "rust-lsp",
824 ..Default::default()
825 }))
826 .await;
827 let mut js = Language::new(
828 LanguageConfig {
829 name: Arc::from("JavaScript"),
830 path_suffixes: vec!["js".to_string()],
831 ..Default::default()
832 },
833 None,
834 );
835 let mut fake_js_servers = js
836 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
837 name: "js-lsp",
838 ..Default::default()
839 }))
840 .await;
841
842 let fs = FakeFs::new(cx.background());
843 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
844 .await;
845
846 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
847 project.update(cx, |project, _| {
848 project.languages.add(Arc::new(rust));
849 project.languages.add(Arc::new(js));
850 });
851
852 let _rs_buffer = project
853 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
854 .await
855 .unwrap();
856 let _js_buffer = project
857 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
858 .await
859 .unwrap();
860
861 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
862 assert_eq!(
863 fake_rust_server_1
864 .receive_notification::<lsp::notification::DidOpenTextDocument>()
865 .await
866 .text_document
867 .uri
868 .as_str(),
869 "file:///dir/a.rs"
870 );
871
872 let mut fake_js_server = fake_js_servers.next().await.unwrap();
873 assert_eq!(
874 fake_js_server
875 .receive_notification::<lsp::notification::DidOpenTextDocument>()
876 .await
877 .text_document
878 .uri
879 .as_str(),
880 "file:///dir/b.js"
881 );
882
    // Disable the Rust language server, ensuring only that server gets stopped.
884 cx.update(|cx| {
885 cx.update_global(|settings: &mut Settings, _| {
886 settings.language_overrides.insert(
887 Arc::from("Rust"),
888 settings::EditorSettings {
889 enable_language_server: Some(false),
890 ..Default::default()
891 },
892 );
893 })
894 });
895 fake_rust_server_1
896 .receive_notification::<lsp::notification::Exit>()
897 .await;
898
899 // Enable Rust and disable JavaScript language servers, ensuring that the
900 // former gets started again and that the latter stops.
901 cx.update(|cx| {
902 cx.update_global(|settings: &mut Settings, _| {
903 settings.language_overrides.insert(
904 Arc::from("Rust"),
905 settings::EditorSettings {
906 enable_language_server: Some(true),
907 ..Default::default()
908 },
909 );
910 settings.language_overrides.insert(
911 Arc::from("JavaScript"),
912 settings::EditorSettings {
913 enable_language_server: Some(false),
914 ..Default::default()
915 },
916 );
917 })
918 });
919 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
920 assert_eq!(
921 fake_rust_server_2
922 .receive_notification::<lsp::notification::DidOpenTextDocument>()
923 .await
924 .text_document
925 .uri
926 .as_str(),
927 "file:///dir/a.rs"
928 );
929 fake_js_server
930 .receive_notification::<lsp::notification::Exit>()
931 .await;
932}
933
934#[gpui::test]
935async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
936 cx.foreground().forbid_parking();
937
938 let mut language = Language::new(
939 LanguageConfig {
940 name: "Rust".into(),
941 path_suffixes: vec!["rs".to_string()],
942 ..Default::default()
943 },
944 Some(tree_sitter_rust::language()),
945 );
946 let mut fake_servers = language
947 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
948 disk_based_diagnostics_sources: vec!["disk".into()],
949 ..Default::default()
950 }))
951 .await;
952
953 let text = "
954 fn a() { A }
955 fn b() { BB }
956 fn c() { CCC }
957 "
958 .unindent();
959
960 let fs = FakeFs::new(cx.background());
961 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
962
963 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
964 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
965
966 let buffer = project
967 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
968 .await
969 .unwrap();
970
971 let mut fake_server = fake_servers.next().await.unwrap();
972 let open_notification = fake_server
973 .receive_notification::<lsp::notification::DidOpenTextDocument>()
974 .await;
975
976 // Edit the buffer, moving the content down
977 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
978 let change_notification_1 = fake_server
979 .receive_notification::<lsp::notification::DidChangeTextDocument>()
980 .await;
981 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
982
983 // Report some diagnostics for the initial version of the buffer
984 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
985 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
986 version: Some(open_notification.text_document.version),
987 diagnostics: vec![
988 lsp::Diagnostic {
989 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
990 severity: Some(DiagnosticSeverity::ERROR),
991 message: "undefined variable 'A'".to_string(),
992 source: Some("disk".to_string()),
993 ..Default::default()
994 },
995 lsp::Diagnostic {
996 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
997 severity: Some(DiagnosticSeverity::ERROR),
998 message: "undefined variable 'BB'".to_string(),
999 source: Some("disk".to_string()),
1000 ..Default::default()
1001 },
1002 lsp::Diagnostic {
1003 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1004 severity: Some(DiagnosticSeverity::ERROR),
1005 source: Some("disk".to_string()),
1006 message: "undefined variable 'CCC'".to_string(),
1007 ..Default::default()
1008 },
1009 ],
1010 });
1011
1012 // The diagnostics have moved down since they were created.
1013 buffer.next_notification(cx).await;
1014 buffer.read_with(cx, |buffer, _| {
1015 assert_eq!(
1016 buffer
1017 .snapshot()
1018 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1019 .collect::<Vec<_>>(),
1020 &[
1021 DiagnosticEntry {
1022 range: Point::new(3, 9)..Point::new(3, 11),
1023 diagnostic: Diagnostic {
1024 severity: DiagnosticSeverity::ERROR,
1025 message: "undefined variable 'BB'".to_string(),
1026 is_disk_based: true,
1027 group_id: 1,
1028 is_primary: true,
1029 ..Default::default()
1030 },
1031 },
1032 DiagnosticEntry {
1033 range: Point::new(4, 9)..Point::new(4, 12),
1034 diagnostic: Diagnostic {
1035 severity: DiagnosticSeverity::ERROR,
1036 message: "undefined variable 'CCC'".to_string(),
1037 is_disk_based: true,
1038 group_id: 2,
1039 is_primary: true,
1040 ..Default::default()
1041 }
1042 }
1043 ]
1044 );
1045 assert_eq!(
1046 chunks_with_diagnostics(buffer, 0..buffer.len()),
1047 [
1048 ("\n\nfn a() { ".to_string(), None),
1049 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1050 (" }\nfn b() { ".to_string(), None),
1051 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1052 (" }\nfn c() { ".to_string(), None),
1053 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1054 (" }\n".to_string(), None),
1055 ]
1056 );
1057 assert_eq!(
1058 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1059 [
1060 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1061 (" }\nfn c() { ".to_string(), None),
1062 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1063 ]
1064 );
1065 });
1066
1067 // Ensure overlapping diagnostics are highlighted correctly.
1068 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1069 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1070 version: Some(open_notification.text_document.version),
1071 diagnostics: vec![
1072 lsp::Diagnostic {
1073 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1074 severity: Some(DiagnosticSeverity::ERROR),
1075 message: "undefined variable 'A'".to_string(),
1076 source: Some("disk".to_string()),
1077 ..Default::default()
1078 },
1079 lsp::Diagnostic {
1080 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1081 severity: Some(DiagnosticSeverity::WARNING),
1082 message: "unreachable statement".to_string(),
1083 source: Some("disk".to_string()),
1084 ..Default::default()
1085 },
1086 ],
1087 });
1088
1089 buffer.next_notification(cx).await;
1090 buffer.read_with(cx, |buffer, _| {
1091 assert_eq!(
1092 buffer
1093 .snapshot()
1094 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1095 .collect::<Vec<_>>(),
1096 &[
1097 DiagnosticEntry {
1098 range: Point::new(2, 9)..Point::new(2, 12),
1099 diagnostic: Diagnostic {
1100 severity: DiagnosticSeverity::WARNING,
1101 message: "unreachable statement".to_string(),
1102 is_disk_based: true,
1103 group_id: 4,
1104 is_primary: true,
1105 ..Default::default()
1106 }
1107 },
1108 DiagnosticEntry {
1109 range: Point::new(2, 9)..Point::new(2, 10),
1110 diagnostic: Diagnostic {
1111 severity: DiagnosticSeverity::ERROR,
1112 message: "undefined variable 'A'".to_string(),
1113 is_disk_based: true,
1114 group_id: 3,
1115 is_primary: true,
1116 ..Default::default()
1117 },
1118 }
1119 ]
1120 );
1121 assert_eq!(
1122 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1123 [
1124 ("fn a() { ".to_string(), None),
1125 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1126 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1127 ("\n".to_string(), None),
1128 ]
1129 );
1130 assert_eq!(
1131 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1132 [
1133 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1134 ("\n".to_string(), None),
1135 ]
1136 );
1137 });
1138
1139 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1140 // changes since the last save.
1141 buffer.update(cx, |buffer, cx| {
1142 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1143 buffer.edit(
1144 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1145 None,
1146 cx,
1147 );
1148 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1149 });
1150 let change_notification_2 = fake_server
1151 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1152 .await;
1153 assert!(
1154 change_notification_2.text_document.version > change_notification_1.text_document.version
1155 );
1156
1157 // Handle out-of-order diagnostics
1158 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1159 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1160 version: Some(change_notification_2.text_document.version),
1161 diagnostics: vec![
1162 lsp::Diagnostic {
1163 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1164 severity: Some(DiagnosticSeverity::ERROR),
1165 message: "undefined variable 'BB'".to_string(),
1166 source: Some("disk".to_string()),
1167 ..Default::default()
1168 },
1169 lsp::Diagnostic {
1170 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1171 severity: Some(DiagnosticSeverity::WARNING),
1172 message: "undefined variable 'A'".to_string(),
1173 source: Some("disk".to_string()),
1174 ..Default::default()
1175 },
1176 ],
1177 });
1178
1179 buffer.next_notification(cx).await;
1180 buffer.read_with(cx, |buffer, _| {
1181 assert_eq!(
1182 buffer
1183 .snapshot()
1184 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1185 .collect::<Vec<_>>(),
1186 &[
1187 DiagnosticEntry {
1188 range: Point::new(2, 21)..Point::new(2, 22),
1189 diagnostic: Diagnostic {
1190 severity: DiagnosticSeverity::WARNING,
1191 message: "undefined variable 'A'".to_string(),
1192 is_disk_based: true,
1193 group_id: 6,
1194 is_primary: true,
1195 ..Default::default()
1196 }
1197 },
1198 DiagnosticEntry {
1199 range: Point::new(3, 9)..Point::new(3, 14),
1200 diagnostic: Diagnostic {
1201 severity: DiagnosticSeverity::ERROR,
1202 message: "undefined variable 'BB'".to_string(),
1203 is_disk_based: true,
1204 group_id: 5,
1205 is_primary: true,
1206 ..Default::default()
1207 },
1208 }
1209 ]
1210 );
1211 });
1212}
1213
1214#[gpui::test]
1215async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1216 cx.foreground().forbid_parking();
1217
1218 let text = concat!(
1219 "let one = ;\n", //
1220 "let two = \n",
1221 "let three = 3;\n",
1222 );
1223
1224 let fs = FakeFs::new(cx.background());
1225 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1226
1227 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1228 let buffer = project
1229 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1230 .await
1231 .unwrap();
1232
1233 project.update(cx, |project, cx| {
1234 project
1235 .update_buffer_diagnostics(
1236 &buffer,
1237 vec![
1238 DiagnosticEntry {
1239 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
1240 diagnostic: Diagnostic {
1241 severity: DiagnosticSeverity::ERROR,
1242 message: "syntax error 1".to_string(),
1243 ..Default::default()
1244 },
1245 },
1246 DiagnosticEntry {
1247 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
1248 diagnostic: Diagnostic {
1249 severity: DiagnosticSeverity::ERROR,
1250 message: "syntax error 2".to_string(),
1251 ..Default::default()
1252 },
1253 },
1254 ],
1255 None,
1256 cx,
1257 )
1258 .unwrap();
1259 });
1260
1261 // An empty range is extended forward to include the following character.
1262 // At the end of a line, an empty range is extended backward to include
1263 // the preceding character.
1264 buffer.read_with(cx, |buffer, _| {
1265 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1266 assert_eq!(
1267 chunks
1268 .iter()
1269 .map(|(s, d)| (s.as_str(), *d))
1270 .collect::<Vec<_>>(),
1271 &[
1272 ("let one = ", None),
1273 (";", Some(DiagnosticSeverity::ERROR)),
1274 ("\nlet two =", None),
1275 (" ", Some(DiagnosticSeverity::ERROR)),
1276 ("\nlet three = 3;\n", None)
1277 ]
1278 );
1279 });
1280}
1281
1282#[gpui::test]
1283async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1284 cx.foreground().forbid_parking();
1285
1286 let mut language = Language::new(
1287 LanguageConfig {
1288 name: "Rust".into(),
1289 path_suffixes: vec!["rs".to_string()],
1290 ..Default::default()
1291 },
1292 Some(tree_sitter_rust::language()),
1293 );
1294 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1295
1296 let text = "
1297 fn a() {
1298 f1();
1299 }
1300 fn b() {
1301 f2();
1302 }
1303 fn c() {
1304 f3();
1305 }
1306 "
1307 .unindent();
1308
1309 let fs = FakeFs::new(cx.background());
1310 fs.insert_tree(
1311 "/dir",
1312 json!({
1313 "a.rs": text.clone(),
1314 }),
1315 )
1316 .await;
1317
1318 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1319 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1320 let buffer = project
1321 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1322 .await
1323 .unwrap();
1324
1325 let mut fake_server = fake_servers.next().await.unwrap();
1326 let lsp_document_version = fake_server
1327 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1328 .await
1329 .text_document
1330 .version;
1331
1332 // Simulate editing the buffer after the language server computes some edits.
1333 buffer.update(cx, |buffer, cx| {
1334 buffer.edit(
1335 [(
1336 Point::new(0, 0)..Point::new(0, 0),
1337 "// above first function\n",
1338 )],
1339 None,
1340 cx,
1341 );
1342 buffer.edit(
1343 [(
1344 Point::new(2, 0)..Point::new(2, 0),
1345 " // inside first function\n",
1346 )],
1347 None,
1348 cx,
1349 );
1350 buffer.edit(
1351 [(
1352 Point::new(6, 4)..Point::new(6, 4),
1353 "// inside second function ",
1354 )],
1355 None,
1356 cx,
1357 );
1358
1359 assert_eq!(
1360 buffer.text(),
1361 "
1362 // above first function
1363 fn a() {
1364 // inside first function
1365 f1();
1366 }
1367 fn b() {
1368 // inside second function f2();
1369 }
1370 fn c() {
1371 f3();
1372 }
1373 "
1374 .unindent()
1375 );
1376 });
1377
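    // Convert the LSP edits, which were computed against the older document
    // version, into buffer edits. They should be adjusted to account for the
    // changes made to the buffer since that version.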
1378 let edits = project
1379 .update(cx, |project, cx| {
1380 project.edits_from_lsp(
1381 &buffer,
1382 vec![
1383 // replace body of first function
1384 lsp::TextEdit {
1385 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1386 new_text: "
1387 fn a() {
1388 f10();
1389 }
1390 "
1391 .unindent(),
1392 },
1393 // edit inside second function
1394 lsp::TextEdit {
1395 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1396 new_text: "00".into(),
1397 },
1398 // edit inside third function via two distinct edits
1399 lsp::TextEdit {
1400 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1401 new_text: "4000".into(),
1402 },
1403 lsp::TextEdit {
1404 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1405 new_text: "".into(),
1406 },
1407 ],
1408 Some(lsp_document_version),
1409 cx,
1410 )
1411 })
1412 .await
1413 .unwrap();
1414
1415 buffer.update(cx, |buffer, cx| {
1416 for (range, new_text) in edits {
1417 buffer.edit([(range, new_text)], None, cx);
1418 }
1419 assert_eq!(
1420 buffer.text(),
1421 "
1422 // above first function
1423 fn a() {
1424 // inside first function
1425 f10();
1426 }
1427 fn b() {
1428 // inside second function f200();
1429 }
1430 fn c() {
1431 f4000();
1432 }
1433 "
1434 .unindent()
1435 );
1436 });
1437}
1438
1439#[gpui::test]
1440async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1441 cx.foreground().forbid_parking();
1442
1443 let text = "
1444 use a::b;
1445 use a::c;
1446
1447 fn f() {
1448 b();
1449 c();
1450 }
1451 "
1452 .unindent();
1453
1454 let fs = FakeFs::new(cx.background());
1455 fs.insert_tree(
1456 "/dir",
1457 json!({
1458 "a.rs": text.clone(),
1459 }),
1460 )
1461 .await;
1462
1463 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1464 let buffer = project
1465 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1466 .await
1467 .unwrap();
1468
1469 // Simulate the language server sending us a small edit in the form of a very large diff.
1470 // Rust-analyzer does this when performing a merge-imports code action.
1471 let edits = project
1472 .update(cx, |project, cx| {
1473 project.edits_from_lsp(
1474 &buffer,
1475 [
1476 // Replace the first use statement without editing the semicolon.
1477 lsp::TextEdit {
1478 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1479 new_text: "a::{b, c}".into(),
1480 },
1481 // Reinsert the remainder of the file between the semicolon and the final
1482 // newline of the file.
1483 lsp::TextEdit {
1484 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1485 new_text: "\n\n".into(),
1486 },
1487 lsp::TextEdit {
1488 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1489 new_text: "
1490 fn f() {
1491 b();
1492 c();
1493 }"
1494 .unindent(),
1495 },
1496 // Delete everything after the first newline of the file.
1497 lsp::TextEdit {
1498 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1499 new_text: "".into(),
1500 },
1501 ],
1502 None,
1503 cx,
1504 )
1505 })
1506 .await
1507 .unwrap();
1508
1509 buffer.update(cx, |buffer, cx| {
1510 let edits = edits
1511 .into_iter()
1512 .map(|(range, text)| {
1513 (
1514 range.start.to_point(buffer)..range.end.to_point(buffer),
1515 text,
1516 )
1517 })
1518 .collect::<Vec<_>>();
1519
1520 assert_eq!(
1521 edits,
1522 [
1523 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1524 (Point::new(1, 0)..Point::new(2, 0), "".into())
1525 ]
1526 );
1527
1528 for (range, new_text) in edits {
1529 buffer.edit([(range, new_text)], None, cx);
1530 }
1531 assert_eq!(
1532 buffer.text(),
1533 "
1534 use a::{b, c};
1535
1536 fn f() {
1537 b();
1538 c();
1539 }
1540 "
1541 .unindent()
1542 );
1543 });
1544}
1545
1546#[gpui::test]
1547async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1548 cx.foreground().forbid_parking();
1549
1550 let text = "
1551 use a::b;
1552 use a::c;
1553
1554 fn f() {
1555 b();
1556 c();
1557 }
1558 "
1559 .unindent();
1560
1561 let fs = FakeFs::new(cx.background());
1562 fs.insert_tree(
1563 "/dir",
1564 json!({
1565 "a.rs": text.clone(),
1566 }),
1567 )
1568 .await;
1569
1570 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1571 let buffer = project
1572 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1573 .await
1574 .unwrap();
1575
1576 // Simulate the language server sending us edits in a non-ordered fashion,
1577 // with ranges sometimes being inverted or pointing to invalid locations.
1578 let edits = project
1579 .update(cx, |project, cx| {
1580 project.edits_from_lsp(
1581 &buffer,
1582 [
1583 lsp::TextEdit {
1584 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1585 new_text: "\n\n".into(),
1586 },
1587 lsp::TextEdit {
1588 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1589 new_text: "a::{b, c}".into(),
1590 },
1591 lsp::TextEdit {
1592 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1593 new_text: "".into(),
1594 },
1595 lsp::TextEdit {
1596 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1597 new_text: "
1598 fn f() {
1599 b();
1600 c();
1601 }"
1602 .unindent(),
1603 },
1604 ],
1605 None,
1606 cx,
1607 )
1608 })
1609 .await
1610 .unwrap();
1611
1612 buffer.update(cx, |buffer, cx| {
1613 let edits = edits
1614 .into_iter()
1615 .map(|(range, text)| {
1616 (
1617 range.start.to_point(buffer)..range.end.to_point(buffer),
1618 text,
1619 )
1620 })
1621 .collect::<Vec<_>>();
1622
1623 assert_eq!(
1624 edits,
1625 [
1626 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1627 (Point::new(1, 0)..Point::new(2, 0), "".into())
1628 ]
1629 );
1630
1631 for (range, new_text) in edits {
1632 buffer.edit([(range, new_text)], None, cx);
1633 }
1634 assert_eq!(
1635 buffer.text(),
1636 "
1637 use a::{b, c};
1638
1639 fn f() {
1640 b();
1641 c();
1642 }
1643 "
1644 .unindent()
1645 );
1646 });
1647}
1648
1649fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1650 buffer: &Buffer,
1651 range: Range<T>,
1652) -> Vec<(String, Option<DiagnosticSeverity>)> {
1653 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1654 for chunk in buffer.snapshot().chunks(range, true) {
1655 if chunks.last().map_or(false, |prev_chunk| {
1656 prev_chunk.1 == chunk.diagnostic_severity
1657 }) {
1658 chunks.last_mut().unwrap().0.push_str(chunk.text);
1659 } else {
1660 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1661 }
1662 }
1663 chunks
1664}
1665
1666#[gpui::test(iterations = 10)]
1667async fn test_definition(cx: &mut gpui::TestAppContext) {
1668 let mut language = Language::new(
1669 LanguageConfig {
1670 name: "Rust".into(),
1671 path_suffixes: vec!["rs".to_string()],
1672 ..Default::default()
1673 },
1674 Some(tree_sitter_rust::language()),
1675 );
1676 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1677
1678 let fs = FakeFs::new(cx.background());
1679 fs.insert_tree(
1680 "/dir",
1681 json!({
1682 "a.rs": "const fn a() { A }",
1683 "b.rs": "const y: i32 = crate::a()",
1684 }),
1685 )
1686 .await;
1687
1688 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1689 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1690
1691 let buffer = project
1692 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1693 .await
1694 .unwrap();
1695
1696 let fake_server = fake_servers.next().await.unwrap();
1697 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1698 let params = params.text_document_position_params;
1699 assert_eq!(
1700 params.text_document.uri.to_file_path().unwrap(),
1701 Path::new("/dir/b.rs"),
1702 );
1703 assert_eq!(params.position, lsp::Position::new(0, 22));
1704
1705 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1706 lsp::Location::new(
1707 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1708 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1709 ),
1710 )))
1711 });
1712
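    // Request the definition at the position of `a` in `crate::a()`. The target
    // should open `a.rs` in a new, non-visible worktree.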
1713 let mut definitions = project
1714 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1715 .await
1716 .unwrap();
1717
    // Assert that no new language server was started.
1719 cx.foreground().run_until_parked();
1720 assert!(fake_servers.try_next().is_err());
1721
1722 assert_eq!(definitions.len(), 1);
1723 let definition = definitions.pop().unwrap();
1724 cx.update(|cx| {
1725 let target_buffer = definition.target.buffer.read(cx);
1726 assert_eq!(
1727 target_buffer
1728 .file()
1729 .unwrap()
1730 .as_local()
1731 .unwrap()
1732 .abs_path(cx),
1733 Path::new("/dir/a.rs"),
1734 );
1735 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1736 assert_eq!(
1737 list_worktrees(&project, cx),
1738 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1739 );
1740
1741 drop(definition);
1742 });
1743 cx.read(|cx| {
1744 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1745 });
1746
1747 fn list_worktrees<'a>(
1748 project: &'a ModelHandle<Project>,
1749 cx: &'a AppContext,
1750 ) -> Vec<(&'a Path, bool)> {
1751 project
1752 .read(cx)
1753 .worktrees(cx)
1754 .map(|worktree| {
1755 let worktree = worktree.read(cx);
1756 (
1757 worktree.as_local().unwrap().abs_path().as_ref(),
1758 worktree.is_visible(),
1759 )
1760 })
1761 .collect::<Vec<_>>()
1762 }
1763}
1764
1765#[gpui::test]
1766async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1767 let mut language = Language::new(
1768 LanguageConfig {
1769 name: "TypeScript".into(),
1770 path_suffixes: vec!["ts".to_string()],
1771 ..Default::default()
1772 },
1773 Some(tree_sitter_typescript::language_typescript()),
1774 );
1775 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1776
1777 let fs = FakeFs::new(cx.background());
1778 fs.insert_tree(
1779 "/dir",
1780 json!({
1781 "a.ts": "",
1782 }),
1783 )
1784 .await;
1785
1786 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1787 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1788 let buffer = project
1789 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1790 .await
1791 .unwrap();
1792
1793 let fake_server = fake_language_servers.next().await.unwrap();
1794
1795 let text = "let a = b.fqn";
1796 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1797 let completions = project.update(cx, |project, cx| {
1798 project.completions(&buffer, text.len(), cx)
1799 });
1800
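    // Respond with a completion that has no explicit edit range. The project
    // should infer the range to replace from the word preceding the cursor
    // ("fqn").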
1801 fake_server
1802 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1803 Ok(Some(lsp::CompletionResponse::Array(vec![
1804 lsp::CompletionItem {
1805 label: "fullyQualifiedName?".into(),
1806 insert_text: Some("fullyQualifiedName".into()),
1807 ..Default::default()
1808 },
1809 ])))
1810 })
1811 .next()
1812 .await;
1813 let completions = completions.await.unwrap();
1814 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1815 assert_eq!(completions.len(), 1);
1816 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1817 assert_eq!(
1818 completions[0].old_range.to_offset(&snapshot),
1819 text.len() - 3..text.len()
1820 );
1821
1822 let text = "let a = \"atoms/cmp\"";
1823 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1824 let completions = project.update(cx, |project, cx| {
1825 project.completions(&buffer, text.len() - 1, cx)
1826 });
1827
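    // When completing inside a string literal, the inferred range should cover
    // the partial word before the cursor ("cmp"), not the closing quote.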
1828 fake_server
1829 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1830 Ok(Some(lsp::CompletionResponse::Array(vec![
1831 lsp::CompletionItem {
1832 label: "component".into(),
1833 ..Default::default()
1834 },
1835 ])))
1836 })
1837 .next()
1838 .await;
1839 let completions = completions.await.unwrap();
1840 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1841 assert_eq!(completions.len(), 1);
1842 assert_eq!(completions[0].new_text, "component");
1843 assert_eq!(
1844 completions[0].old_range.to_offset(&snapshot),
1845 text.len() - 4..text.len() - 1
1846 );
1847}
1848
1849#[gpui::test]
1850async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1851 let mut language = Language::new(
1852 LanguageConfig {
1853 name: "TypeScript".into(),
1854 path_suffixes: vec!["ts".to_string()],
1855 ..Default::default()
1856 },
1857 Some(tree_sitter_typescript::language_typescript()),
1858 );
1859 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1860
1861 let fs = FakeFs::new(cx.background());
1862 fs.insert_tree(
1863 "/dir",
1864 json!({
1865 "a.ts": "",
1866 }),
1867 )
1868 .await;
1869
1870 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1871 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1872 let buffer = project
1873 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1874 .await
1875 .unwrap();
1876
1877 let fake_server = fake_language_servers.next().await.unwrap();
1878
1879 let text = "let a = b.fqn";
1880 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1881 let completions = project.update(cx, |project, cx| {
1882 project.completions(&buffer, text.len(), cx)
1883 });
1884
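    // The server's insert text contains carriage returns. The project should
    // normalize the completion's line endings to `\n`.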
1885 fake_server
1886 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1887 Ok(Some(lsp::CompletionResponse::Array(vec![
1888 lsp::CompletionItem {
1889 label: "fullyQualifiedName?".into(),
1890 insert_text: Some("fully\rQualified\r\nName".into()),
1891 ..Default::default()
1892 },
1893 ])))
1894 })
1895 .next()
1896 .await;
1897 let completions = completions.await.unwrap();
1898 assert_eq!(completions.len(), 1);
1899 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1900}
1901
1902#[gpui::test(iterations = 10)]
1903async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1904 let mut language = Language::new(
1905 LanguageConfig {
1906 name: "TypeScript".into(),
1907 path_suffixes: vec!["ts".to_string()],
1908 ..Default::default()
1909 },
1910 None,
1911 );
1912 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1913
1914 let fs = FakeFs::new(cx.background());
1915 fs.insert_tree(
1916 "/dir",
1917 json!({
1918 "a.ts": "a",
1919 }),
1920 )
1921 .await;
1922
1923 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1924 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1925 let buffer = project
1926 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1927 .await
1928 .unwrap();
1929
1930 let fake_server = fake_language_servers.next().await.unwrap();
1931
    // The language server returns code actions that contain commands, not edits.
1933 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1934 fake_server
1935 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1936 Ok(Some(vec![
1937 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1938 title: "The code action".into(),
1939 command: Some(lsp::Command {
1940 title: "The command".into(),
1941 command: "_the/command".into(),
1942 arguments: Some(vec![json!("the-argument")]),
1943 }),
1944 ..Default::default()
1945 }),
1946 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1947 title: "two".into(),
1948 ..Default::default()
1949 }),
1950 ]))
1951 })
1952 .next()
1953 .await;
1954
1955 let action = actions.await.unwrap()[0].clone();
1956 let apply = project.update(cx, |project, cx| {
1957 project.apply_code_action(buffer.clone(), action, true, cx)
1958 });
1959
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
1962 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
1963 |action, _| async move { Ok(action) },
1964 );
1965
1966 // While executing the command, the language server sends the editor
1967 // a `workspaceEdit` request.
1968 fake_server
1969 .handle_request::<lsp::request::ExecuteCommand, _, _>({
1970 let fake = fake_server.clone();
1971 move |params, _| {
1972 assert_eq!(params.command, "_the/command");
1973 let fake = fake.clone();
1974 async move {
1975 fake.server
1976 .request::<lsp::request::ApplyWorkspaceEdit>(
1977 lsp::ApplyWorkspaceEditParams {
1978 label: None,
1979 edit: lsp::WorkspaceEdit {
1980 changes: Some(
1981 [(
1982 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
1983 vec![lsp::TextEdit {
1984 range: lsp::Range::new(
1985 lsp::Position::new(0, 0),
1986 lsp::Position::new(0, 0),
1987 ),
1988 new_text: "X".into(),
1989 }],
1990 )]
1991 .into_iter()
1992 .collect(),
1993 ),
1994 ..Default::default()
1995 },
1996 },
1997 )
1998 .await
1999 .unwrap();
2000 Ok(Some(json!(null)))
2001 }
2002 }
2003 })
2004 .next()
2005 .await;
2006
2007 // Applying the code action returns a project transaction containing the edits
2008 // sent by the language server in its `workspaceEdit` request.
2009 let transaction = apply.await.unwrap();
2010 assert!(transaction.0.contains_key(&buffer));
2011 buffer.update(cx, |buffer, cx| {
2012 assert_eq!(buffer.text(), "Xa");
2013 buffer.undo(cx);
2014 assert_eq!(buffer.text(), "a");
2015 });
2016}
2017
2018#[gpui::test]
2019async fn test_save_file(cx: &mut gpui::TestAppContext) {
2020 let fs = FakeFs::new(cx.background());
2021 fs.insert_tree(
2022 "/dir",
2023 json!({
2024 "file1": "the old contents",
2025 }),
2026 )
2027 .await;
2028
2029 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2030 let buffer = project
2031 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2032 .await
2033 .unwrap();
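    // Insert a large amount of text into the buffer and save it to disk.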
2034 buffer
2035 .update(cx, |buffer, cx| {
2036 assert_eq!(buffer.text(), "the old contents");
2037 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2038 buffer.save(cx)
2039 })
2040 .await
2041 .unwrap();
2042
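    // The contents on disk now match the buffer's contents.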
2043 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2044 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2045}
2046
2047#[gpui::test]
2048async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2049 let fs = FakeFs::new(cx.background());
2050 fs.insert_tree(
2051 "/dir",
2052 json!({
2053 "file1": "the old contents",
2054 }),
2055 )
2056 .await;
2057
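    // Open a worktree that is rooted at a single file rather than a directory.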
2058 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2059 let buffer = project
2060 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2061 .await
2062 .unwrap();
2063 buffer
2064 .update(cx, |buffer, cx| {
2065 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2066 buffer.save(cx)
2067 })
2068 .await
2069 .unwrap();
2070
2071 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2072 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2073}
2074
2075#[gpui::test]
2076async fn test_save_as(cx: &mut gpui::TestAppContext) {
2077 let fs = FakeFs::new(cx.background());
2078 fs.insert_tree("/dir", json!({})).await;
2079
2080 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2081 let buffer = project.update(cx, |project, cx| {
2082 project.create_buffer("", None, cx).unwrap()
2083 });
2084 buffer.update(cx, |buffer, cx| {
2085 buffer.edit([(0..0, "abc")], None, cx);
2086 assert!(buffer.is_dirty());
2087 assert!(!buffer.has_conflict());
2088 });
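    // Save the untitled buffer to a new path within the worktree.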
2089 project
2090 .update(cx, |project, cx| {
2091 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2092 })
2093 .await
2094 .unwrap();
2095 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2096 buffer.read_with(cx, |buffer, cx| {
2097 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2098 assert!(!buffer.is_dirty());
2099 assert!(!buffer.has_conflict());
2100 });
2101
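    // Opening the buffer's new path returns the same buffer that was just saved, not a new one.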
2102 let opened_buffer = project
2103 .update(cx, |project, cx| {
2104 project.open_local_buffer("/dir/file1", cx)
2105 })
2106 .await
2107 .unwrap();
2108 assert_eq!(opened_buffer, buffer);
2109}
2110
2111#[gpui::test(retries = 5)]
2112async fn test_rescan_and_remote_updates(
2113 deterministic: Arc<Deterministic>,
2114 cx: &mut gpui::TestAppContext,
2115) {
2116 let dir = temp_tree(json!({
2117 "a": {
2118 "file1": "",
2119 "file2": "",
2120 "file3": "",
2121 },
2122 "b": {
2123 "c": {
2124 "file4": "",
2125 "file5": "",
2126 }
2127 }
2128 }));
2129
2130 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2131 let rpc = project.read_with(cx, |p, _| p.client.clone());
2132
2133 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2134 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2135 async move { buffer.await.unwrap() }
2136 };
2137 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2138 project.read_with(cx, |project, cx| {
2139 let tree = project.worktrees(cx).next().unwrap();
2140 tree.read(cx)
2141 .entry_for_path(path)
2142 .unwrap_or_else(|| panic!("no entry for path {}", path))
2143 .id
2144 })
2145 };
2146
2147 let buffer2 = buffer_for_path("a/file2", cx).await;
2148 let buffer3 = buffer_for_path("a/file3", cx).await;
2149 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2150 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2151
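    // Record the ids of the entries that are about to be renamed, so that we can verify they are preserved.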
2152 let file2_id = id_for_path("a/file2", cx);
2153 let file3_id = id_for_path("a/file3", cx);
2154 let file4_id = id_for_path("b/c/file4", cx);
2155
2156 // Create a remote copy of this worktree.
2157 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2158 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2159 let remote = cx.update(|cx| {
2160 Worktree::remote(
2161 1,
2162 1,
2163 proto::WorktreeMetadata {
2164 id: initial_snapshot.id().to_proto(),
2165 root_name: initial_snapshot.root_name().into(),
2166 visible: true,
2167 },
2168 rpc.clone(),
2169 cx,
2170 )
2171 });
2172 remote.update(cx, |remote, _| {
2173 let update = initial_snapshot.build_initial_update(1);
2174 remote.as_remote_mut().unwrap().update_from_remote(update);
2175 });
2176 deterministic.run_until_parked();
2177
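    // None of the open buffers are dirty before the filesystem is mutated.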
2178 cx.read(|cx| {
2179 assert!(!buffer2.read(cx).is_dirty());
2180 assert!(!buffer3.read(cx).is_dirty());
2181 assert!(!buffer4.read(cx).is_dirty());
2182 assert!(!buffer5.read(cx).is_dirty());
2183 });
2184
2185 // Rename and delete files and directories.
2186 tree.flush_fs_events(cx).await;
2187 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2188 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2189 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2190 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2191 tree.flush_fs_events(cx).await;
2192
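    // After the rescan, these are the paths that the worktree should contain.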
2193 let expected_paths = vec![
2194 "a",
2195 "a/file1",
2196 "a/file2.new",
2197 "b",
2198 "d",
2199 "d/file3",
2200 "d/file4",
2201 ];
2202
2203 cx.read(|app| {
2204 assert_eq!(
2205 tree.read(app)
2206 .paths()
2207 .map(|p| p.to_str().unwrap())
2208 .collect::<Vec<_>>(),
2209 expected_paths
2210 );
2211
2212 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2213 assert_eq!(id_for_path("d/file3", cx), file3_id);
2214 assert_eq!(id_for_path("d/file4", cx), file4_id);
2215
2216 assert_eq!(
2217 buffer2.read(app).file().unwrap().path().as_ref(),
2218 Path::new("a/file2.new")
2219 );
2220 assert_eq!(
2221 buffer3.read(app).file().unwrap().path().as_ref(),
2222 Path::new("d/file3")
2223 );
2224 assert_eq!(
2225 buffer4.read(app).file().unwrap().path().as_ref(),
2226 Path::new("d/file4")
2227 );
2228 assert_eq!(
2229 buffer5.read(app).file().unwrap().path().as_ref(),
2230 Path::new("b/c/file5")
2231 );
2232
2233 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2234 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2235 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2236 assert!(buffer5.read(app).file().unwrap().is_deleted());
2237 });
2238
2239 // Update the remote worktree. Check that it becomes consistent with the
2240 // local worktree.
2241 remote.update(cx, |remote, cx| {
2242 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2243 &initial_snapshot,
2244 1,
2245 1,
2246 true,
2247 );
2248 remote.as_remote_mut().unwrap().update_from_remote(update);
2249 });
2250 deterministic.run_until_parked();
2251 remote.read_with(cx, |remote, _| {
2252 assert_eq!(
2253 remote
2254 .paths()
2255 .map(|p| p.to_str().unwrap())
2256 .collect::<Vec<_>>(),
2257 expected_paths
2258 );
2259 });
2260}
2261
2262#[gpui::test]
2263async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2264 let fs = FakeFs::new(cx.background());
2265 fs.insert_tree(
2266 "/dir",
2267 json!({
2268 "a.txt": "a-contents",
2269 "b.txt": "b-contents",
2270 }),
2271 )
2272 .await;
2273
2274 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2275
2276 // Spawn multiple tasks to open paths, repeating some paths.
2277 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2278 (
2279 p.open_local_buffer("/dir/a.txt", cx),
2280 p.open_local_buffer("/dir/b.txt", cx),
2281 p.open_local_buffer("/dir/a.txt", cx),
2282 )
2283 });
2284
2285 let buffer_a_1 = buffer_a_1.await.unwrap();
2286 let buffer_a_2 = buffer_a_2.await.unwrap();
2287 let buffer_b = buffer_b.await.unwrap();
2288 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2289 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2290
2291 // There is only one buffer per path.
2292 let buffer_a_id = buffer_a_1.id();
2293 assert_eq!(buffer_a_2.id(), buffer_a_id);
2294
    // Drop one handle and open the same path again while the buffer is still open elsewhere.
2296 drop(buffer_a_1);
2297 let buffer_a_3 = project
2298 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2299 .await
2300 .unwrap();
2301
2302 // There's still only one buffer per path.
2303 assert_eq!(buffer_a_3.id(), buffer_a_id);
2304}
2305
2306#[gpui::test]
2307async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2308 let fs = FakeFs::new(cx.background());
2309 fs.insert_tree(
2310 "/dir",
2311 json!({
2312 "file1": "abc",
2313 "file2": "def",
2314 "file3": "ghi",
2315 }),
2316 )
2317 .await;
2318
2319 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2320
2321 let buffer1 = project
2322 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2323 .await
2324 .unwrap();
2325 let events = Rc::new(RefCell::new(Vec::new()));
2326
    // Initially, the buffer isn't dirty.
2328 buffer1.update(cx, |buffer, cx| {
2329 cx.subscribe(&buffer1, {
2330 let events = events.clone();
2331 move |_, _, event, _| match event {
2332 BufferEvent::Operation(_) => {}
2333 _ => events.borrow_mut().push(event.clone()),
2334 }
2335 })
2336 .detach();
2337
2338 assert!(!buffer.is_dirty());
2339 assert!(events.borrow().is_empty());
2340
2341 buffer.edit([(1..2, "")], None, cx);
2342 });
2343
    // After the first edit, the buffer is dirty and emits `Edited` and `DirtyChanged` events.
2345 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2347 assert!(buffer.is_dirty());
2348 assert_eq!(
2349 *events.borrow(),
2350 &[language::Event::Edited, language::Event::DirtyChanged]
2351 );
2352 events.borrow_mut().clear();
2353 buffer.did_save(
2354 buffer.version(),
2355 buffer.as_rope().fingerprint(),
2356 buffer.file().unwrap().mtime(),
2357 None,
2358 cx,
2359 );
2360 });
2361
    // After saving, the buffer is no longer dirty and emits a `Saved` event.
2363 buffer1.update(cx, |buffer, cx| {
2364 assert!(!buffer.is_dirty());
2365 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2366 events.borrow_mut().clear();
2367
2368 buffer.edit([(1..1, "B")], None, cx);
2369 buffer.edit([(2..2, "D")], None, cx);
2370 });
2371
    // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
2373 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2375 assert!(buffer.is_dirty());
2376 assert_eq!(
2377 *events.borrow(),
2378 &[
2379 language::Event::Edited,
2380 language::Event::DirtyChanged,
2381 language::Event::Edited,
2382 ],
2383 );
2384 events.borrow_mut().clear();
2385
2386 // After restoring the buffer to its previously-saved state,
2387 // the buffer is not considered dirty anymore.
2388 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
2390 assert!(!buffer.is_dirty());
2391 });
2392
2393 assert_eq!(
2394 *events.borrow(),
2395 &[language::Event::Edited, language::Event::DirtyChanged]
2396 );
2397
2398 // When a file is deleted, the buffer is considered dirty.
2399 let events = Rc::new(RefCell::new(Vec::new()));
2400 let buffer2 = project
2401 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2402 .await
2403 .unwrap();
2404 buffer2.update(cx, |_, cx| {
2405 cx.subscribe(&buffer2, {
2406 let events = events.clone();
2407 move |_, _, event, _| events.borrow_mut().push(event.clone())
2408 })
2409 .detach();
2410 });
2411
2412 fs.remove_file("/dir/file2".as_ref(), Default::default())
2413 .await
2414 .unwrap();
2415 cx.foreground().run_until_parked();
2416 assert_eq!(
2417 *events.borrow(),
2418 &[
2419 language::Event::DirtyChanged,
2420 language::Event::FileHandleChanged
2421 ]
2422 );
2423
    // When a file that is already dirty is deleted, no `DirtyChanged` event is emitted.
2425 let events = Rc::new(RefCell::new(Vec::new()));
2426 let buffer3 = project
2427 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2428 .await
2429 .unwrap();
2430 buffer3.update(cx, |_, cx| {
2431 cx.subscribe(&buffer3, {
2432 let events = events.clone();
2433 move |_, _, event, _| events.borrow_mut().push(event.clone())
2434 })
2435 .detach();
2436 });
2437
2438 buffer3.update(cx, |buffer, cx| {
2439 buffer.edit([(0..0, "x")], None, cx);
2440 });
2441 events.borrow_mut().clear();
2442 fs.remove_file("/dir/file3".as_ref(), Default::default())
2443 .await
2444 .unwrap();
2445 cx.foreground().run_until_parked();
2446 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2447 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2448}
2449
2450#[gpui::test]
2451async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2452 let initial_contents = "aaa\nbbbbb\nc\n";
2453 let fs = FakeFs::new(cx.background());
2454 fs.insert_tree(
2455 "/dir",
2456 json!({
2457 "the-file": initial_contents,
2458 }),
2459 )
2460 .await;
2461 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2462 let buffer = project
2463 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2464 .await
2465 .unwrap();
2466
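    // Create some anchors in the buffer; after the file is reloaded from disk,
    // they should resolve to positions consistent with the diff.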
2467 let anchors = (0..3)
2468 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2469 .collect::<Vec<_>>();
2470
2471 // Change the file on disk, adding two new lines of text, and removing
2472 // one line.
2473 buffer.read_with(cx, |buffer, _| {
2474 assert!(!buffer.is_dirty());
2475 assert!(!buffer.has_conflict());
2476 });
2477 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2478 fs.save(
2479 "/dir/the-file".as_ref(),
2480 &new_contents.into(),
2481 LineEnding::Unix,
2482 )
2483 .await
2484 .unwrap();
2485
2486 // Because the buffer was not modified, it is reloaded from disk. Its
2487 // contents are edited according to the diff between the old and new
2488 // file contents.
2489 cx.foreground().run_until_parked();
2490 buffer.update(cx, |buffer, _| {
2491 assert_eq!(buffer.text(), new_contents);
2492 assert!(!buffer.is_dirty());
2493 assert!(!buffer.has_conflict());
2494
2495 let anchor_positions = anchors
2496 .iter()
2497 .map(|anchor| anchor.to_point(&*buffer))
2498 .collect::<Vec<_>>();
2499 assert_eq!(
2500 anchor_positions,
2501 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2502 );
2503 });
2504
    // Modify the buffer so that it becomes dirty.
2506 buffer.update(cx, |buffer, cx| {
2507 buffer.edit([(0..0, " ")], None, cx);
2508 assert!(buffer.is_dirty());
2509 assert!(!buffer.has_conflict());
2510 });
2511
2512 // Change the file on disk again, adding blank lines to the beginning.
2513 fs.save(
2514 "/dir/the-file".as_ref(),
2515 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2516 LineEnding::Unix,
2517 )
2518 .await
2519 .unwrap();
2520
2521 // Because the buffer is modified, it doesn't reload from disk, but is
2522 // marked as having a conflict.
2523 cx.foreground().run_until_parked();
2524 buffer.read_with(cx, |buffer, _| {
2525 assert!(buffer.has_conflict());
2526 });
2527}
2528
2529#[gpui::test]
2530async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2531 let fs = FakeFs::new(cx.background());
2532 fs.insert_tree(
2533 "/dir",
2534 json!({
2535 "file1": "a\nb\nc\n",
2536 "file2": "one\r\ntwo\r\nthree\r\n",
2537 }),
2538 )
2539 .await;
2540
2541 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2542 let buffer1 = project
2543 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2544 .await
2545 .unwrap();
2546 let buffer2 = project
2547 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2548 .await
2549 .unwrap();
2550
2551 buffer1.read_with(cx, |buffer, _| {
2552 assert_eq!(buffer.text(), "a\nb\nc\n");
2553 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2554 });
2555 buffer2.read_with(cx, |buffer, _| {
2556 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2557 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2558 });
2559
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates accordingly.
2562 fs.save(
2563 "/dir/file1".as_ref(),
2564 &"aaa\nb\nc\n".into(),
2565 LineEnding::Windows,
2566 )
2567 .await
2568 .unwrap();
2569 cx.foreground().run_until_parked();
2570 buffer1.read_with(cx, |buffer, _| {
2571 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2572 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2573 });
2574
    // Save a file with Windows line endings. The file is written to disk with CRLF endings.
2576 buffer2
2577 .update(cx, |buffer, cx| {
2578 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2579 buffer.save(cx)
2580 })
2581 .await
2582 .unwrap();
2583 assert_eq!(
2584 fs.load("/dir/file2".as_ref()).await.unwrap(),
2585 "one\r\ntwo\r\nthree\r\nfour\r\n",
2586 );
2587}
2588
2589#[gpui::test]
2590async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2591 cx.foreground().forbid_parking();
2592
2593 let fs = FakeFs::new(cx.background());
2594 fs.insert_tree(
2595 "/the-dir",
2596 json!({
2597 "a.rs": "
2598 fn foo(mut v: Vec<usize>) {
2599 for x in &v {
2600 v.push(1);
2601 }
2602 }
2603 "
2604 .unindent(),
2605 }),
2606 )
2607 .await;
2608
2609 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2610 let buffer = project
2611 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2612 .await
2613 .unwrap();
2614
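    // Simulate the language server publishing diagnostics whose related information links
    // hint diagnostics back to the primary diagnostics they belong to.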
2615 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2616 let message = lsp::PublishDiagnosticsParams {
2617 uri: buffer_uri.clone(),
2618 diagnostics: vec![
2619 lsp::Diagnostic {
2620 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2621 severity: Some(DiagnosticSeverity::WARNING),
2622 message: "error 1".to_string(),
2623 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2624 location: lsp::Location {
2625 uri: buffer_uri.clone(),
2626 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2627 },
2628 message: "error 1 hint 1".to_string(),
2629 }]),
2630 ..Default::default()
2631 },
2632 lsp::Diagnostic {
2633 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2634 severity: Some(DiagnosticSeverity::HINT),
2635 message: "error 1 hint 1".to_string(),
2636 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2637 location: lsp::Location {
2638 uri: buffer_uri.clone(),
2639 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2640 },
2641 message: "original diagnostic".to_string(),
2642 }]),
2643 ..Default::default()
2644 },
2645 lsp::Diagnostic {
2646 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2647 severity: Some(DiagnosticSeverity::ERROR),
2648 message: "error 2".to_string(),
2649 related_information: Some(vec![
2650 lsp::DiagnosticRelatedInformation {
2651 location: lsp::Location {
2652 uri: buffer_uri.clone(),
2653 range: lsp::Range::new(
2654 lsp::Position::new(1, 13),
2655 lsp::Position::new(1, 15),
2656 ),
2657 },
2658 message: "error 2 hint 1".to_string(),
2659 },
2660 lsp::DiagnosticRelatedInformation {
2661 location: lsp::Location {
2662 uri: buffer_uri.clone(),
2663 range: lsp::Range::new(
2664 lsp::Position::new(1, 13),
2665 lsp::Position::new(1, 15),
2666 ),
2667 },
2668 message: "error 2 hint 2".to_string(),
2669 },
2670 ]),
2671 ..Default::default()
2672 },
2673 lsp::Diagnostic {
2674 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2675 severity: Some(DiagnosticSeverity::HINT),
2676 message: "error 2 hint 1".to_string(),
2677 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2678 location: lsp::Location {
2679 uri: buffer_uri.clone(),
2680 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2681 },
2682 message: "original diagnostic".to_string(),
2683 }]),
2684 ..Default::default()
2685 },
2686 lsp::Diagnostic {
2687 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2688 severity: Some(DiagnosticSeverity::HINT),
2689 message: "error 2 hint 2".to_string(),
2690 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2691 location: lsp::Location {
2692 uri: buffer_uri,
2693 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2694 },
2695 message: "original diagnostic".to_string(),
2696 }]),
2697 ..Default::default()
2698 },
2699 ],
2700 version: None,
2701 };
2702
2703 project
2704 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2705 .unwrap();
2706 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2707
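    // All diagnostics appear in the buffer, grouped with their related hints via group ids.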
2708 assert_eq!(
2709 buffer
2710 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2711 .collect::<Vec<_>>(),
2712 &[
2713 DiagnosticEntry {
2714 range: Point::new(1, 8)..Point::new(1, 9),
2715 diagnostic: Diagnostic {
2716 severity: DiagnosticSeverity::WARNING,
2717 message: "error 1".to_string(),
2718 group_id: 0,
2719 is_primary: true,
2720 ..Default::default()
2721 }
2722 },
2723 DiagnosticEntry {
2724 range: Point::new(1, 8)..Point::new(1, 9),
2725 diagnostic: Diagnostic {
2726 severity: DiagnosticSeverity::HINT,
2727 message: "error 1 hint 1".to_string(),
2728 group_id: 0,
2729 is_primary: false,
2730 ..Default::default()
2731 }
2732 },
2733 DiagnosticEntry {
2734 range: Point::new(1, 13)..Point::new(1, 15),
2735 diagnostic: Diagnostic {
2736 severity: DiagnosticSeverity::HINT,
2737 message: "error 2 hint 1".to_string(),
2738 group_id: 1,
2739 is_primary: false,
2740 ..Default::default()
2741 }
2742 },
2743 DiagnosticEntry {
2744 range: Point::new(1, 13)..Point::new(1, 15),
2745 diagnostic: Diagnostic {
2746 severity: DiagnosticSeverity::HINT,
2747 message: "error 2 hint 2".to_string(),
2748 group_id: 1,
2749 is_primary: false,
2750 ..Default::default()
2751 }
2752 },
2753 DiagnosticEntry {
2754 range: Point::new(2, 8)..Point::new(2, 17),
2755 diagnostic: Diagnostic {
2756 severity: DiagnosticSeverity::ERROR,
2757 message: "error 2".to_string(),
2758 group_id: 1,
2759 is_primary: true,
2760 ..Default::default()
2761 }
2762 }
2763 ]
2764 );
2765
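    // Each group can also be retrieved individually by its group id.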
2766 assert_eq!(
2767 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2768 &[
2769 DiagnosticEntry {
2770 range: Point::new(1, 8)..Point::new(1, 9),
2771 diagnostic: Diagnostic {
2772 severity: DiagnosticSeverity::WARNING,
2773 message: "error 1".to_string(),
2774 group_id: 0,
2775 is_primary: true,
2776 ..Default::default()
2777 }
2778 },
2779 DiagnosticEntry {
2780 range: Point::new(1, 8)..Point::new(1, 9),
2781 diagnostic: Diagnostic {
2782 severity: DiagnosticSeverity::HINT,
2783 message: "error 1 hint 1".to_string(),
2784 group_id: 0,
2785 is_primary: false,
2786 ..Default::default()
2787 }
2788 },
2789 ]
2790 );
2791 assert_eq!(
2792 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2793 &[
2794 DiagnosticEntry {
2795 range: Point::new(1, 13)..Point::new(1, 15),
2796 diagnostic: Diagnostic {
2797 severity: DiagnosticSeverity::HINT,
2798 message: "error 2 hint 1".to_string(),
2799 group_id: 1,
2800 is_primary: false,
2801 ..Default::default()
2802 }
2803 },
2804 DiagnosticEntry {
2805 range: Point::new(1, 13)..Point::new(1, 15),
2806 diagnostic: Diagnostic {
2807 severity: DiagnosticSeverity::HINT,
2808 message: "error 2 hint 2".to_string(),
2809 group_id: 1,
2810 is_primary: false,
2811 ..Default::default()
2812 }
2813 },
2814 DiagnosticEntry {
2815 range: Point::new(2, 8)..Point::new(2, 17),
2816 diagnostic: Diagnostic {
2817 severity: DiagnosticSeverity::ERROR,
2818 message: "error 2".to_string(),
2819 group_id: 1,
2820 is_primary: true,
2821 ..Default::default()
2822 }
2823 }
2824 ]
2825 );
2826}
2827
2828#[gpui::test]
2829async fn test_rename(cx: &mut gpui::TestAppContext) {
2830 cx.foreground().forbid_parking();
2831
2832 let mut language = Language::new(
2833 LanguageConfig {
2834 name: "Rust".into(),
2835 path_suffixes: vec!["rs".to_string()],
2836 ..Default::default()
2837 },
2838 Some(tree_sitter_rust::language()),
2839 );
2840 let mut fake_servers = language
2841 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2842 capabilities: lsp::ServerCapabilities {
2843 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2844 prepare_provider: Some(true),
2845 work_done_progress_options: Default::default(),
2846 })),
2847 ..Default::default()
2848 },
2849 ..Default::default()
2850 }))
2851 .await;
2852
2853 let fs = FakeFs::new(cx.background());
2854 fs.insert_tree(
2855 "/dir",
2856 json!({
2857 "one.rs": "const ONE: usize = 1;",
2858 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2859 }),
2860 )
2861 .await;
2862
2863 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2864 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2865 let buffer = project
2866 .update(cx, |project, cx| {
2867 project.open_local_buffer("/dir/one.rs", cx)
2868 })
2869 .await
2870 .unwrap();
2871
2872 let fake_server = fake_servers.next().await.unwrap();
2873
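    // Prepare a rename at an offset inside the name `ONE`. The fake server responds with the symbol's range.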
2874 let response = project.update(cx, |project, cx| {
2875 project.prepare_rename(buffer.clone(), 7, cx)
2876 });
2877 fake_server
2878 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2879 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2880 assert_eq!(params.position, lsp::Position::new(0, 7));
2881 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2882 lsp::Position::new(0, 6),
2883 lsp::Position::new(0, 9),
2884 ))))
2885 })
2886 .next()
2887 .await
2888 .unwrap();
2889 let range = response.await.unwrap().unwrap();
2890 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2891 assert_eq!(range, 6..9);
2892
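    // Perform the rename. The fake server responds with a workspace edit that renames the constant in both files.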
2893 let response = project.update(cx, |project, cx| {
2894 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2895 });
2896 fake_server
2897 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2898 assert_eq!(
2899 params.text_document_position.text_document.uri.as_str(),
2900 "file:///dir/one.rs"
2901 );
2902 assert_eq!(
2903 params.text_document_position.position,
2904 lsp::Position::new(0, 7)
2905 );
2906 assert_eq!(params.new_name, "THREE");
2907 Ok(Some(lsp::WorkspaceEdit {
2908 changes: Some(
2909 [
2910 (
2911 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2912 vec![lsp::TextEdit::new(
2913 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2914 "THREE".to_string(),
2915 )],
2916 ),
2917 (
2918 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2919 vec![
2920 lsp::TextEdit::new(
2921 lsp::Range::new(
2922 lsp::Position::new(0, 24),
2923 lsp::Position::new(0, 27),
2924 ),
2925 "THREE".to_string(),
2926 ),
2927 lsp::TextEdit::new(
2928 lsp::Range::new(
2929 lsp::Position::new(0, 35),
2930 lsp::Position::new(0, 38),
2931 ),
2932 "THREE".to_string(),
2933 ),
2934 ],
2935 ),
2936 ]
2937 .into_iter()
2938 .collect(),
2939 ),
2940 ..Default::default()
2941 }))
2942 })
2943 .next()
2944 .await
2945 .unwrap();
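    // The returned project transaction contains the edited contents of both buffers.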
2946 let mut transaction = response.await.unwrap().0;
2947 assert_eq!(transaction.len(), 2);
2948 assert_eq!(
2949 transaction
2950 .remove_entry(&buffer)
2951 .unwrap()
2952 .0
2953 .read_with(cx, |buffer, _| buffer.text()),
2954 "const THREE: usize = 1;"
2955 );
2956 assert_eq!(
2957 transaction
2958 .into_keys()
2959 .next()
2960 .unwrap()
2961 .read_with(cx, |buffer, _| buffer.text()),
2962 "const TWO: usize = one::THREE + one::THREE;"
2963 );
2964}
2965
2966#[gpui::test]
2967async fn test_search(cx: &mut gpui::TestAppContext) {
2968 let fs = FakeFs::new(cx.background());
2969 fs.insert_tree(
2970 "/dir",
2971 json!({
2972 "one.rs": "const ONE: usize = 1;",
2973 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
2974 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
2975 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
2976 }),
2977 )
2978 .await;
2979 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2980 assert_eq!(
2981 search(&project, SearchQuery::text("TWO", false, true), cx)
2982 .await
2983 .unwrap(),
2984 HashMap::from_iter([
2985 ("two.rs".to_string(), vec![6..9]),
2986 ("three.rs".to_string(), vec![37..40])
2987 ])
2988 );
2989
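    // Edit an open buffer so that its in-memory contents differ from the file on disk.
    // Subsequent searches should reflect the unsaved edits.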
2990 let buffer_4 = project
2991 .update(cx, |project, cx| {
2992 project.open_local_buffer("/dir/four.rs", cx)
2993 })
2994 .await
2995 .unwrap();
2996 buffer_4.update(cx, |buffer, cx| {
2997 let text = "two::TWO";
2998 buffer.edit([(20..28, text), (31..43, text)], None, cx);
2999 });
3000
3001 assert_eq!(
3002 search(&project, SearchQuery::text("TWO", false, true), cx)
3003 .await
3004 .unwrap(),
3005 HashMap::from_iter([
3006 ("two.rs".to_string(), vec![6..9]),
3007 ("three.rs".to_string(), vec![37..40]),
3008 ("four.rs".to_string(), vec![25..28, 36..39])
3009 ])
3010 );
3011
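    // Helper that runs a project-wide search and collects the matches as paths and byte ranges.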
3012 async fn search(
3013 project: &ModelHandle<Project>,
3014 query: SearchQuery,
3015 cx: &mut gpui::TestAppContext,
3016 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3017 let results = project
3018 .update(cx, |project, cx| project.search(query, cx))
3019 .await?;
3020
3021 Ok(results
3022 .into_iter()
3023 .map(|(buffer, ranges)| {
3024 buffer.read_with(cx, |buffer, _| {
3025 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3026 let ranges = ranges
3027 .into_iter()
3028 .map(|range| range.to_offset(buffer))
3029 .collect::<Vec<_>>();
3030 (path, ranges)
3031 })
3032 })
3033 .collect())
3034 }
3035}