use crate::{worktree::WorktreeHandle, Event, *};
use fs::LineEnding;
use fs::{FakeFs, RealFs};
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe};
use language::{
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    OffsetRangeExt, ToPoint,
};
use lsp::Url;
use rope::point::Point;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

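// Worktrees should traverse symlinked directories, and paths that resolve to the
// same file should report the same inode.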
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}

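// Covers the language-server lifecycle: servers start per language, receive
// open/change/save/close notifications for matching buffers, follow renames
// across languages, and restart cleanly.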
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    cx.foreground().forbid_parking();

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A JSON language server is started up, and it is only notified about the JSON buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    toml_buffer
        .update(cx, |buffer, cx| buffer.save(cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // The diagnostics are cleared, since the buffer's language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure the Rust document is reopened in the new Rust language server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure the JSON documents are reopened in the new JSON language server.
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

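// Diagnostics published for single-file worktrees should reach the corresponding buffers.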
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

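// Diagnostics in an invisible worktree should show up in its buffer without
// affecting the project-wide diagnostic summaries.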
#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

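// Progress notifications carrying the disk-based diagnostics token should be
// surfaced as started/updated/finished project events.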
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

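// Restarting a server mid-update should attribute subsequent diagnostics
// progress to the new server and still report completion.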
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [0; 0]
        );
    });
}

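// Toggling `enable_language_server` per language should stop and restart only the affected servers.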
#[gpui::test]
async fn test_toggling_enable_language_server(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    deterministic.forbid_parking();

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::EditorSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::EditorSettings {
                    enable_language_server: Some(true),
                    ..Default::default()
                },
            );
            settings.language_overrides.insert(
                Arc::from("JavaScript"),
                settings::EditorSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

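// Diagnostics published against older buffer versions should be mapped through
// subsequent edits to their current positions.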
#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

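// Zero-width diagnostic ranges should still be highlighted by expanding them to a neighboring character.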
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

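// LSP edits computed against a stale document version should be rebased onto the
// buffer's current contents.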
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

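// A large LSP diff that rewrites adjacent lines should be reduced to the minimal set of buffer edits.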
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

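// Unordered, inverted, or out-of-bounds LSP ranges should be normalized rather than rejected.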
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

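/// Returns the buffer's text in `range` as chunks, merging adjacent chunks that
/// share the same diagnostic severity.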
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}

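// Go-to-definition should load the target file into an invisible worktree that is
// released once the definition is dropped.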
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}

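// Completions that omit an explicit edit range should fall back to replacing the
// word fragment preceding the cursor.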
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}

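// Carriage returns in completion text should be normalized to newlines before insertion.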
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}

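// Code actions that only carry a command should execute it and fold the server's
// resulting workspace edit into the returned transaction.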
1904#[gpui::test(iterations = 10)]
1905async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1906 let mut language = Language::new(
1907 LanguageConfig {
1908 name: "TypeScript".into(),
1909 path_suffixes: vec!["ts".to_string()],
1910 ..Default::default()
1911 },
1912 None,
1913 );
1914 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1915
1916 let fs = FakeFs::new(cx.background());
1917 fs.insert_tree(
1918 "/dir",
1919 json!({
1920 "a.ts": "a",
1921 }),
1922 )
1923 .await;
1924
1925 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1926 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1927 let buffer = project
1928 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1929 .await
1930 .unwrap();
1931
    let fake_server = fake_language_servers.next().await.unwrap();

    // The language server returns code actions that contain commands rather than edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In the absence of
    // edits, the command associated with the action must be executed instead.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}

#[gpui::test]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
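    // Edit the buffer and save it; the new contents should be written to disk.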
    buffer
        .update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "the old contents");
            buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
            buffer.save(cx)
        })
        .await
        .unwrap();

    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}

#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

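    // Create a worktree rooted at a single file rather than a directory.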
    let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer
        .update(cx, |buffer, cx| {
            buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
            buffer.save(cx)
        })
        .await
        .unwrap();

    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}

#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
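    // Create an empty in-memory buffer that is not yet backed by a file.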
    let buffer = project.update(cx, |project, cx| {
        project.create_buffer("", None, cx).unwrap()
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    project
        .update(cx, |project, cx| {
            project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
    buffer.read_with(cx, |buffer, cx| {
        assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

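    // Re-opening the saved path should return the same buffer.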
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}

#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.read_with(cx, |p, _| p.client.clone());

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

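    // Open some buffers and record their file entry ids before changing the file system.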
    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    let remote = cx.update(|cx| {
        Worktree::remote(
            1,
            1,
            proto::WorktreeMetadata {
                id: initial_snapshot.id().to_proto(),
                root_name: initial_snapshot.root_name().into(),
                visible: true,
            },
            rpc.clone(),
            cx,
        )
    });
    remote.update(cx, |remote, _| {
        let update = initial_snapshot.build_initial_update(1);
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();

    cx.read(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.read(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );

        assert_eq!(id_for_path("a/file2.new", cx), file2_id);
        assert_eq!(id_for_path("d/file3", cx), file3_id);
        assert_eq!(id_for_path("d/file4", cx), file4_id);

        assert_eq!(
            buffer2.read(app).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(app).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(app).file().unwrap().is_deleted());
        assert!(!buffer3.read(app).file().unwrap().is_deleted());
        assert!(!buffer4.read(app).file().unwrap().is_deleted());
        assert!(buffer5.read(app).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    remote.update(cx, |remote, cx| {
        let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
            &initial_snapshot,
            1,
            1,
            true,
        );
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();
    remote.read_with(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}

#[gpui::test]
async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.txt": "a-contents",
            "b.txt": "b-contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Spawn multiple tasks to open paths, repeating some paths.
    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
        (
            p.open_local_buffer("/dir/a.txt", cx),
            p.open_local_buffer("/dir/b.txt", cx),
            p.open_local_buffer("/dir/a.txt", cx),
        )
    });

    let buffer_a_1 = buffer_a_1.await.unwrap();
    let buffer_a_2 = buffer_a_2.await.unwrap();
    let buffer_b = buffer_b.await.unwrap();
    assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
    assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");

    // There is only one buffer per path.
    let buffer_a_id = buffer_a_1.id();
    assert_eq!(buffer_a_2.id(), buffer_a_id);

    // Open the same path again while it is still open.
    drop(buffer_a_1);
    let buffer_a_3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
        .await
        .unwrap();

    // There's still only one buffer per path.
    assert_eq!(buffer_a_3.id(), buffer_a_id);
}

#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Rc::new(RefCell::new(Vec::new()));

    // Initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // After the first edit, the buffer is dirty and emits a DirtyChanged event.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.borrow_mut().clear();
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            None,
            cx,
        );
    });

    // After saving, the buffer is not dirty and emits a Saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // After editing again, the buffer is dirty and emits another DirtyChanged event.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.borrow(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    assert_eq!(
        *events.borrow(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is deleted while it is already dirty, no DirtyChanged event is emitted.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.borrow_mut().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}

#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

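    // Place an anchor at column 1 of each of the first three rows, so their positions can be checked after the file is reloaded.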
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.foreground().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}

#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

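    // CRLF line endings are normalized to "\n" in the buffer's text, but the original style is remembered in `line_ending`.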
    buffer1.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.foreground().run_until_parked();
    buffer1.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with Windows line endings. The file is written correctly.
    buffer2
        .update(cx, |buffer, cx| {
            buffer.set_text("one\ntwo\nthree\nfour\n", cx);
            buffer.save(cx)
        })
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}

#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

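    // Publish diagnostics in which hint diagnostics and their primary diagnostics reference each other via related information, forming two groups.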
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

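    // Each group can also be queried individually by its group id.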
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}

#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

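    // Prepare to rename the symbol at offset 7, which falls inside `ONE`.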
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

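    // Perform the rename. The returned transaction should contain edits to both files that reference `ONE`.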
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .read_with(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .read_with(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}

#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(&project, SearchQuery::text("TWO", false, true), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            ("two.rs".to_string(), vec![6..9]),
            ("three.rs".to_string(), vec![37..40])
        ])
    );

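    // Edit an open buffer so its contents differ from what's on disk; the search results should reflect the buffer's current text.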
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(&project, SearchQuery::text("TWO", false, true), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            ("two.rs".to_string(), vec![6..9]),
            ("three.rs".to_string(), vec![37..40]),
            ("four.rs".to_string(), vec![25..28, 36..39])
        ])
    );

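    // Helper that runs a project-wide search and collects the matches as a map from file path to byte ranges.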
    async fn search(
        project: &ModelHandle<Project>,
        query: SearchQuery,
        cx: &mut gpui::TestAppContext,
    ) -> Result<HashMap<String, Vec<Range<usize>>>> {
        let results = project
            .update(cx, |project, cx| project.search(query, cx))
            .await?;

        Ok(results
            .into_iter()
            .map(|(buffer, ranges)| {
                buffer.read_with(cx, |buffer, _| {
                    let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                    let ranges = ranges
                        .into_iter()
                        .map(|range| range.to_offset(buffer))
                        .collect::<Vec<_>>();
                    (path, ranges)
                })
            })
            .collect())
    }
}