1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::RealFs;
3use futures::{future, StreamExt};
4use gpui::{executor::Deterministic, test::subscribe};
5use language::{
6 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
7 LineEnding, OffsetRangeExt, Point, ToPoint,
8};
9use lsp::Url;
10use serde_json::json;
11use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
12use unindent::Unindent as _;
13use util::{assert_set_eq, test::temp_tree};
14
15#[gpui::test]
16async fn test_symlinks(cx: &mut gpui::TestAppContext) {
17 let dir = temp_tree(json!({
18 "root": {
19 "apple": "",
20 "banana": {
21 "carrot": {
22 "date": "",
23 "endive": "",
24 }
25 },
26 "fennel": {
27 "grape": "",
28 }
29 }
30 }));
31
32 let root_link_path = dir.path().join("root_link");
33 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
34 unix::fs::symlink(
35 &dir.path().join("root/fennel"),
36 &dir.path().join("root/finnochio"),
37 )
38 .unwrap();
39
40 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
41 project.read_with(cx, |project, cx| {
42 let tree = project.worktrees(cx).next().unwrap().read(cx);
43 assert_eq!(tree.file_count(), 5);
44 assert_eq!(
45 tree.inode_for_path("fennel/grape"),
46 tree.inode_for_path("finnochio/grape")
47 );
48 });
49}
50
51#[gpui::test]
52async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
53 cx.foreground().forbid_parking();
54
55 let mut rust_language = Language::new(
56 LanguageConfig {
57 name: "Rust".into(),
58 path_suffixes: vec!["rs".to_string()],
59 ..Default::default()
60 },
61 Some(tree_sitter_rust::language()),
62 );
63 let mut json_language = Language::new(
64 LanguageConfig {
65 name: "JSON".into(),
66 path_suffixes: vec!["json".to_string()],
67 ..Default::default()
68 },
69 None,
70 );
71 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
72 name: "the-rust-language-server",
73 capabilities: lsp::ServerCapabilities {
74 completion_provider: Some(lsp::CompletionOptions {
75 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
76 ..Default::default()
77 }),
78 ..Default::default()
79 },
80 ..Default::default()
81 });
82 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
83 name: "the-json-language-server",
84 capabilities: lsp::ServerCapabilities {
85 completion_provider: Some(lsp::CompletionOptions {
86 trigger_characters: Some(vec![":".to_string()]),
87 ..Default::default()
88 }),
89 ..Default::default()
90 },
91 ..Default::default()
92 });
93
94 let fs = FakeFs::new(cx.background());
95 fs.insert_tree(
96 "/the-root",
97 json!({
98 "test.rs": "const A: i32 = 1;",
99 "test2.rs": "",
100 "Cargo.toml": "a = 1",
101 "package.json": "{\"a\": 1}",
102 }),
103 )
104 .await;
105
106 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
107 project.update(cx, |project, _| {
108 project.languages.add(Arc::new(rust_language));
109 project.languages.add(Arc::new(json_language));
110 });
111
112 // Open a buffer without an associated language server.
113 let toml_buffer = project
114 .update(cx, |project, cx| {
115 project.open_local_buffer("/the-root/Cargo.toml", cx)
116 })
117 .await
118 .unwrap();
119
120 // Open a buffer with an associated language server.
121 let rust_buffer = project
122 .update(cx, |project, cx| {
123 project.open_local_buffer("/the-root/test.rs", cx)
124 })
125 .await
126 .unwrap();
127
128 // A server is started up, and it is notified about Rust files.
129 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
130 assert_eq!(
131 fake_rust_server
132 .receive_notification::<lsp::notification::DidOpenTextDocument>()
133 .await
134 .text_document,
135 lsp::TextDocumentItem {
136 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
137 version: 0,
138 text: "const A: i32 = 1;".to_string(),
139 language_id: Default::default()
140 }
141 );
142
143 // The buffer is configured based on the language server's capabilities.
144 rust_buffer.read_with(cx, |buffer, _| {
145 assert_eq!(
146 buffer.completion_triggers(),
147 &[".".to_string(), "::".to_string()]
148 );
149 });
150 toml_buffer.read_with(cx, |buffer, _| {
151 assert!(buffer.completion_triggers().is_empty());
152 });
153
154 // Edit a buffer. The changes are reported to the language server.
155 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
156 assert_eq!(
157 fake_rust_server
158 .receive_notification::<lsp::notification::DidChangeTextDocument>()
159 .await
160 .text_document,
161 lsp::VersionedTextDocumentIdentifier::new(
162 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
163 1
164 )
165 );
166
167 // Open a third buffer with a different associated language server.
168 let json_buffer = project
169 .update(cx, |project, cx| {
170 project.open_local_buffer("/the-root/package.json", cx)
171 })
172 .await
173 .unwrap();
174
175 // A json language server is started up and is only notified about the json buffer.
176 let mut fake_json_server = fake_json_servers.next().await.unwrap();
177 assert_eq!(
178 fake_json_server
179 .receive_notification::<lsp::notification::DidOpenTextDocument>()
180 .await
181 .text_document,
182 lsp::TextDocumentItem {
183 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
184 version: 0,
185 text: "{\"a\": 1}".to_string(),
186 language_id: Default::default()
187 }
188 );
189
190 // This buffer is configured based on the second language server's
191 // capabilities.
192 json_buffer.read_with(cx, |buffer, _| {
193 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
194 });
195
196 // When opening another buffer whose language server is already running,
197 // it is also configured based on the existing language server's capabilities.
198 let rust_buffer2 = project
199 .update(cx, |project, cx| {
200 project.open_local_buffer("/the-root/test2.rs", cx)
201 })
202 .await
203 .unwrap();
204 rust_buffer2.read_with(cx, |buffer, _| {
205 assert_eq!(
206 buffer.completion_triggers(),
207 &[".".to_string(), "::".to_string()]
208 );
209 });
210
211 // Changes are reported only to servers matching the buffer's language.
212 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
213 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
214 assert_eq!(
215 fake_rust_server
216 .receive_notification::<lsp::notification::DidChangeTextDocument>()
217 .await
218 .text_document,
219 lsp::VersionedTextDocumentIdentifier::new(
220 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
221 1
222 )
223 );
224
225 // Save notifications are reported to all servers.
226 toml_buffer
227 .update(cx, |buffer, cx| buffer.save(cx))
228 .await
229 .unwrap();
230 assert_eq!(
231 fake_rust_server
232 .receive_notification::<lsp::notification::DidSaveTextDocument>()
233 .await
234 .text_document,
235 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
236 );
237 assert_eq!(
238 fake_json_server
239 .receive_notification::<lsp::notification::DidSaveTextDocument>()
240 .await
241 .text_document,
242 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
243 );
244
245 // Renames are reported only to servers matching the buffer's language.
246 fs.rename(
247 Path::new("/the-root/test2.rs"),
248 Path::new("/the-root/test3.rs"),
249 Default::default(),
250 )
251 .await
252 .unwrap();
253 assert_eq!(
254 fake_rust_server
255 .receive_notification::<lsp::notification::DidCloseTextDocument>()
256 .await
257 .text_document,
258 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
259 );
260 assert_eq!(
261 fake_rust_server
262 .receive_notification::<lsp::notification::DidOpenTextDocument>()
263 .await
264 .text_document,
265 lsp::TextDocumentItem {
266 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
267 version: 0,
268 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
269 language_id: Default::default()
270 },
271 );
272
273 rust_buffer2.update(cx, |buffer, cx| {
274 buffer.update_diagnostics(
275 DiagnosticSet::from_sorted_entries(
276 vec![DiagnosticEntry {
277 diagnostic: Default::default(),
278 range: Anchor::MIN..Anchor::MAX,
279 }],
280 &buffer.snapshot(),
281 ),
282 cx,
283 );
284 assert_eq!(
285 buffer
286 .snapshot()
287 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
288 .count(),
289 1
290 );
291 });
292
293 // When the rename changes the extension of the file, the buffer gets closed on the old
294 // language server and gets opened on the new one.
295 fs.rename(
296 Path::new("/the-root/test3.rs"),
297 Path::new("/the-root/test3.json"),
298 Default::default(),
299 )
300 .await
301 .unwrap();
302 assert_eq!(
303 fake_rust_server
304 .receive_notification::<lsp::notification::DidCloseTextDocument>()
305 .await
306 .text_document,
307 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
308 );
309 assert_eq!(
310 fake_json_server
311 .receive_notification::<lsp::notification::DidOpenTextDocument>()
312 .await
313 .text_document,
314 lsp::TextDocumentItem {
315 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
316 version: 0,
317 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
318 language_id: Default::default()
319 },
320 );
321
322 // We clear the diagnostics, since the language has changed.
323 rust_buffer2.read_with(cx, |buffer, _| {
324 assert_eq!(
325 buffer
326 .snapshot()
327 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
328 .count(),
329 0
330 );
331 });
332
333 // The renamed file's version resets after changing language server.
334 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
335 assert_eq!(
336 fake_json_server
337 .receive_notification::<lsp::notification::DidChangeTextDocument>()
338 .await
339 .text_document,
340 lsp::VersionedTextDocumentIdentifier::new(
341 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
342 1
343 )
344 );
345
346 // Restart language servers
347 project.update(cx, |project, cx| {
348 project.restart_language_servers_for_buffers(
349 vec![rust_buffer.clone(), json_buffer.clone()],
350 cx,
351 );
352 });
353
354 let mut rust_shutdown_requests = fake_rust_server
355 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
356 let mut json_shutdown_requests = fake_json_server
357 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
358 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
359
360 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
361 let mut fake_json_server = fake_json_servers.next().await.unwrap();
362
363 // Ensure rust document is reopened in new rust language server
364 assert_eq!(
365 fake_rust_server
366 .receive_notification::<lsp::notification::DidOpenTextDocument>()
367 .await
368 .text_document,
369 lsp::TextDocumentItem {
370 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
371 version: 1,
372 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
373 language_id: Default::default()
374 }
375 );
376
377 // Ensure json documents are reopened in new json language server
378 assert_set_eq!(
379 [
380 fake_json_server
381 .receive_notification::<lsp::notification::DidOpenTextDocument>()
382 .await
383 .text_document,
384 fake_json_server
385 .receive_notification::<lsp::notification::DidOpenTextDocument>()
386 .await
387 .text_document,
388 ],
389 [
390 lsp::TextDocumentItem {
391 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
392 version: 0,
393 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
394 language_id: Default::default()
395 },
396 lsp::TextDocumentItem {
397 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
398 version: 1,
399 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
400 language_id: Default::default()
401 }
402 ]
403 );
404
405 // Close notifications are reported only to servers matching the buffer's language.
406 cx.update(|_| drop(json_buffer));
407 let close_message = lsp::DidCloseTextDocumentParams {
408 text_document: lsp::TextDocumentIdentifier::new(
409 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
410 ),
411 };
412 assert_eq!(
413 fake_json_server
414 .receive_notification::<lsp::notification::DidCloseTextDocument>()
415 .await,
416 close_message,
417 );
418}
419
420#[gpui::test]
421async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
422 cx.foreground().forbid_parking();
423
424 let fs = FakeFs::new(cx.background());
425 fs.insert_tree(
426 "/dir",
427 json!({
428 "a.rs": "let a = 1;",
429 "b.rs": "let b = 2;"
430 }),
431 )
432 .await;
433
434 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
435
436 let buffer_a = project
437 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
438 .await
439 .unwrap();
440 let buffer_b = project
441 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
442 .await
443 .unwrap();
444
445 project.update(cx, |project, cx| {
446 project
447 .update_diagnostics(
448 0,
449 lsp::PublishDiagnosticsParams {
450 uri: Url::from_file_path("/dir/a.rs").unwrap(),
451 version: None,
452 diagnostics: vec![lsp::Diagnostic {
453 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
454 severity: Some(lsp::DiagnosticSeverity::ERROR),
455 message: "error 1".to_string(),
456 ..Default::default()
457 }],
458 },
459 &[],
460 cx,
461 )
462 .unwrap();
463 project
464 .update_diagnostics(
465 0,
466 lsp::PublishDiagnosticsParams {
467 uri: Url::from_file_path("/dir/b.rs").unwrap(),
468 version: None,
469 diagnostics: vec![lsp::Diagnostic {
470 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
471 severity: Some(lsp::DiagnosticSeverity::WARNING),
472 message: "error 2".to_string(),
473 ..Default::default()
474 }],
475 },
476 &[],
477 cx,
478 )
479 .unwrap();
480 });
481
482 buffer_a.read_with(cx, |buffer, _| {
483 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
484 assert_eq!(
485 chunks
486 .iter()
487 .map(|(s, d)| (s.as_str(), *d))
488 .collect::<Vec<_>>(),
489 &[
490 ("let ", None),
491 ("a", Some(DiagnosticSeverity::ERROR)),
492 (" = 1;", None),
493 ]
494 );
495 });
496 buffer_b.read_with(cx, |buffer, _| {
497 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
498 assert_eq!(
499 chunks
500 .iter()
501 .map(|(s, d)| (s.as_str(), *d))
502 .collect::<Vec<_>>(),
503 &[
504 ("let ", None),
505 ("b", Some(DiagnosticSeverity::WARNING)),
506 (" = 2;", None),
507 ]
508 );
509 });
510}
511
512#[gpui::test]
513async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
514 cx.foreground().forbid_parking();
515
516 let fs = FakeFs::new(cx.background());
517 fs.insert_tree(
518 "/root",
519 json!({
520 "dir": {
521 "a.rs": "let a = 1;",
522 },
523 "other.rs": "let b = c;"
524 }),
525 )
526 .await;
527
528 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
529
530 let (worktree, _) = project
531 .update(cx, |project, cx| {
532 project.find_or_create_local_worktree("/root/other.rs", false, cx)
533 })
534 .await
535 .unwrap();
536 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
537
538 project.update(cx, |project, cx| {
539 project
540 .update_diagnostics(
541 0,
542 lsp::PublishDiagnosticsParams {
543 uri: Url::from_file_path("/root/other.rs").unwrap(),
544 version: None,
545 diagnostics: vec![lsp::Diagnostic {
546 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
547 severity: Some(lsp::DiagnosticSeverity::ERROR),
548 message: "unknown variable 'c'".to_string(),
549 ..Default::default()
550 }],
551 },
552 &[],
553 cx,
554 )
555 .unwrap();
556 });
557
558 let buffer = project
559 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
560 .await
561 .unwrap();
562 buffer.read_with(cx, |buffer, _| {
563 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
564 assert_eq!(
565 chunks
566 .iter()
567 .map(|(s, d)| (s.as_str(), *d))
568 .collect::<Vec<_>>(),
569 &[
570 ("let b = ", None),
571 ("c", Some(DiagnosticSeverity::ERROR)),
572 (";", None),
573 ]
574 );
575 });
576
577 project.read_with(cx, |project, cx| {
578 assert_eq!(project.diagnostic_summaries(cx).next(), None);
579 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
580 });
581}
582
583#[gpui::test]
584async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
585 cx.foreground().forbid_parking();
586
587 let progress_token = "the-progress-token";
588 let mut language = Language::new(
589 LanguageConfig {
590 name: "Rust".into(),
591 path_suffixes: vec!["rs".to_string()],
592 ..Default::default()
593 },
594 Some(tree_sitter_rust::language()),
595 );
596 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
597 disk_based_diagnostics_progress_token: Some(progress_token),
598 disk_based_diagnostics_sources: &["disk"],
599 ..Default::default()
600 });
601
602 let fs = FakeFs::new(cx.background());
603 fs.insert_tree(
604 "/dir",
605 json!({
606 "a.rs": "fn a() { A }",
607 "b.rs": "const y: i32 = 1",
608 }),
609 )
610 .await;
611
612 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
613 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
614 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
615
616 // Cause worktree to start the fake language server
617 let _buffer = project
618 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
619 .await
620 .unwrap();
621
622 let mut events = subscribe(&project, cx);
623
624 let fake_server = fake_servers.next().await.unwrap();
625 fake_server.start_progress(progress_token).await;
626 assert_eq!(
627 events.next().await.unwrap(),
628 Event::DiskBasedDiagnosticsStarted {
629 language_server_id: 0,
630 }
631 );
632
633 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
634 uri: Url::from_file_path("/dir/a.rs").unwrap(),
635 version: None,
636 diagnostics: vec![lsp::Diagnostic {
637 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
638 severity: Some(lsp::DiagnosticSeverity::ERROR),
639 message: "undefined variable 'A'".to_string(),
640 ..Default::default()
641 }],
642 });
643 assert_eq!(
644 events.next().await.unwrap(),
645 Event::DiagnosticsUpdated {
646 language_server_id: 0,
647 path: (worktree_id, Path::new("a.rs")).into()
648 }
649 );
650
651 fake_server.end_progress(progress_token);
652 assert_eq!(
653 events.next().await.unwrap(),
654 Event::DiskBasedDiagnosticsFinished {
655 language_server_id: 0
656 }
657 );
658
659 let buffer = project
660 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
661 .await
662 .unwrap();
663
664 buffer.read_with(cx, |buffer, _| {
665 let snapshot = buffer.snapshot();
666 let diagnostics = snapshot
667 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
668 .collect::<Vec<_>>();
669 assert_eq!(
670 diagnostics,
671 &[DiagnosticEntry {
672 range: Point::new(0, 9)..Point::new(0, 10),
673 diagnostic: Diagnostic {
674 severity: lsp::DiagnosticSeverity::ERROR,
675 message: "undefined variable 'A'".to_string(),
676 group_id: 0,
677 is_primary: true,
678 ..Default::default()
679 }
680 }]
681 )
682 });
683
684 // Ensure publishing empty diagnostics twice only results in one update event.
685 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
686 uri: Url::from_file_path("/dir/a.rs").unwrap(),
687 version: None,
688 diagnostics: Default::default(),
689 });
690 assert_eq!(
691 events.next().await.unwrap(),
692 Event::DiagnosticsUpdated {
693 language_server_id: 0,
694 path: (worktree_id, Path::new("a.rs")).into()
695 }
696 );
697
698 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
699 uri: Url::from_file_path("/dir/a.rs").unwrap(),
700 version: None,
701 diagnostics: Default::default(),
702 });
703 cx.foreground().run_until_parked();
704 assert_eq!(futures::poll!(events.next()), Poll::Pending);
705}
706
707#[gpui::test]
708async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
709 cx.foreground().forbid_parking();
710
711 let progress_token = "the-progress-token";
712 let mut language = Language::new(
713 LanguageConfig {
714 path_suffixes: vec!["rs".to_string()],
715 ..Default::default()
716 },
717 None,
718 );
719 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
720 disk_based_diagnostics_sources: &["disk"],
721 disk_based_diagnostics_progress_token: Some(progress_token),
722 ..Default::default()
723 });
724
725 let fs = FakeFs::new(cx.background());
726 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
727
728 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
729 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
730
731 let buffer = project
732 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
733 .await
734 .unwrap();
735
736 // Simulate diagnostics starting to update.
737 let fake_server = fake_servers.next().await.unwrap();
738 fake_server.start_progress(progress_token).await;
739
740 // Restart the server before the diagnostics finish updating.
741 project.update(cx, |project, cx| {
742 project.restart_language_servers_for_buffers([buffer], cx);
743 });
744 let mut events = subscribe(&project, cx);
745
746 // Simulate the newly started server sending more diagnostics.
747 let fake_server = fake_servers.next().await.unwrap();
748 fake_server.start_progress(progress_token).await;
749 assert_eq!(
750 events.next().await.unwrap(),
751 Event::DiskBasedDiagnosticsStarted {
752 language_server_id: 1
753 }
754 );
755 project.read_with(cx, |project, _| {
756 assert_eq!(
757 project
758 .language_servers_running_disk_based_diagnostics()
759 .collect::<Vec<_>>(),
760 [1]
761 );
762 });
763
764 // All diagnostics are considered done, despite the old server's diagnostic
765 // task never completing.
766 fake_server.end_progress(progress_token);
767 assert_eq!(
768 events.next().await.unwrap(),
769 Event::DiskBasedDiagnosticsFinished {
770 language_server_id: 1
771 }
772 );
773 project.read_with(cx, |project, _| {
774 assert_eq!(
775 project
776 .language_servers_running_disk_based_diagnostics()
777 .collect::<Vec<_>>(),
778 [0; 0]
779 );
780 });
781}
782
783#[gpui::test]
784async fn test_toggling_enable_language_server(
785 deterministic: Arc<Deterministic>,
786 cx: &mut gpui::TestAppContext,
787) {
788 deterministic.forbid_parking();
789
790 let mut rust = Language::new(
791 LanguageConfig {
792 name: Arc::from("Rust"),
793 path_suffixes: vec!["rs".to_string()],
794 ..Default::default()
795 },
796 None,
797 );
798 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
799 name: "rust-lsp",
800 ..Default::default()
801 });
802 let mut js = Language::new(
803 LanguageConfig {
804 name: Arc::from("JavaScript"),
805 path_suffixes: vec!["js".to_string()],
806 ..Default::default()
807 },
808 None,
809 );
810 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
811 name: "js-lsp",
812 ..Default::default()
813 });
814
815 let fs = FakeFs::new(cx.background());
816 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
817 .await;
818
819 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
820 project.update(cx, |project, _| {
821 project.languages.add(Arc::new(rust));
822 project.languages.add(Arc::new(js));
823 });
824
825 let _rs_buffer = project
826 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
827 .await
828 .unwrap();
829 let _js_buffer = project
830 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
831 .await
832 .unwrap();
833
834 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
835 assert_eq!(
836 fake_rust_server_1
837 .receive_notification::<lsp::notification::DidOpenTextDocument>()
838 .await
839 .text_document
840 .uri
841 .as_str(),
842 "file:///dir/a.rs"
843 );
844
845 let mut fake_js_server = fake_js_servers.next().await.unwrap();
846 assert_eq!(
847 fake_js_server
848 .receive_notification::<lsp::notification::DidOpenTextDocument>()
849 .await
850 .text_document
851 .uri
852 .as_str(),
853 "file:///dir/b.js"
854 );
855
856 // Disable Rust language server, ensuring only that server gets stopped.
857 cx.update(|cx| {
858 cx.update_global(|settings: &mut Settings, _| {
859 settings.language_overrides.insert(
860 Arc::from("Rust"),
861 settings::LanguageSettings {
862 enable_language_server: Some(false),
863 ..Default::default()
864 },
865 );
866 })
867 });
868 fake_rust_server_1
869 .receive_notification::<lsp::notification::Exit>()
870 .await;
871
872 // Enable Rust and disable JavaScript language servers, ensuring that the
873 // former gets started again and that the latter stops.
874 cx.update(|cx| {
875 cx.update_global(|settings: &mut Settings, _| {
876 settings.language_overrides.insert(
877 Arc::from("Rust"),
878 settings::LanguageSettings {
879 enable_language_server: Some(true),
880 ..Default::default()
881 },
882 );
883 settings.language_overrides.insert(
884 Arc::from("JavaScript"),
885 settings::LanguageSettings {
886 enable_language_server: Some(false),
887 ..Default::default()
888 },
889 );
890 })
891 });
892 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
893 assert_eq!(
894 fake_rust_server_2
895 .receive_notification::<lsp::notification::DidOpenTextDocument>()
896 .await
897 .text_document
898 .uri
899 .as_str(),
900 "file:///dir/a.rs"
901 );
902 fake_js_server
903 .receive_notification::<lsp::notification::Exit>()
904 .await;
905}
906
907#[gpui::test]
908async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
909 cx.foreground().forbid_parking();
910
911 let mut language = Language::new(
912 LanguageConfig {
913 name: "Rust".into(),
914 path_suffixes: vec!["rs".to_string()],
915 ..Default::default()
916 },
917 Some(tree_sitter_rust::language()),
918 );
919 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
920 disk_based_diagnostics_sources: &["disk"],
921 ..Default::default()
922 });
923
924 let text = "
925 fn a() { A }
926 fn b() { BB }
927 fn c() { CCC }
928 "
929 .unindent();
930
931 let fs = FakeFs::new(cx.background());
932 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
933
934 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
935 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
936
937 let buffer = project
938 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
939 .await
940 .unwrap();
941
942 let mut fake_server = fake_servers.next().await.unwrap();
943 let open_notification = fake_server
944 .receive_notification::<lsp::notification::DidOpenTextDocument>()
945 .await;
946
947 // Edit the buffer, moving the content down
948 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
949 let change_notification_1 = fake_server
950 .receive_notification::<lsp::notification::DidChangeTextDocument>()
951 .await;
952 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
953
954 // Report some diagnostics for the initial version of the buffer
955 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
956 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
957 version: Some(open_notification.text_document.version),
958 diagnostics: vec![
959 lsp::Diagnostic {
960 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
961 severity: Some(DiagnosticSeverity::ERROR),
962 message: "undefined variable 'A'".to_string(),
963 source: Some("disk".to_string()),
964 ..Default::default()
965 },
966 lsp::Diagnostic {
967 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
968 severity: Some(DiagnosticSeverity::ERROR),
969 message: "undefined variable 'BB'".to_string(),
970 source: Some("disk".to_string()),
971 ..Default::default()
972 },
973 lsp::Diagnostic {
974 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
975 severity: Some(DiagnosticSeverity::ERROR),
976 source: Some("disk".to_string()),
977 message: "undefined variable 'CCC'".to_string(),
978 ..Default::default()
979 },
980 ],
981 });
982
983 // The diagnostics have moved down since they were created.
984 buffer.next_notification(cx).await;
985 buffer.read_with(cx, |buffer, _| {
986 assert_eq!(
987 buffer
988 .snapshot()
989 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
990 .collect::<Vec<_>>(),
991 &[
992 DiagnosticEntry {
993 range: Point::new(3, 9)..Point::new(3, 11),
994 diagnostic: Diagnostic {
995 severity: DiagnosticSeverity::ERROR,
996 message: "undefined variable 'BB'".to_string(),
997 is_disk_based: true,
998 group_id: 1,
999 is_primary: true,
1000 ..Default::default()
1001 },
1002 },
1003 DiagnosticEntry {
1004 range: Point::new(4, 9)..Point::new(4, 12),
1005 diagnostic: Diagnostic {
1006 severity: DiagnosticSeverity::ERROR,
1007 message: "undefined variable 'CCC'".to_string(),
1008 is_disk_based: true,
1009 group_id: 2,
1010 is_primary: true,
1011 ..Default::default()
1012 }
1013 }
1014 ]
1015 );
1016 assert_eq!(
1017 chunks_with_diagnostics(buffer, 0..buffer.len()),
1018 [
1019 ("\n\nfn a() { ".to_string(), None),
1020 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1021 (" }\nfn b() { ".to_string(), None),
1022 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1023 (" }\nfn c() { ".to_string(), None),
1024 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1025 (" }\n".to_string(), None),
1026 ]
1027 );
1028 assert_eq!(
1029 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1030 [
1031 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1032 (" }\nfn c() { ".to_string(), None),
1033 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1034 ]
1035 );
1036 });
1037
1038 // Ensure overlapping diagnostics are highlighted correctly.
1039 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1040 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1041 version: Some(open_notification.text_document.version),
1042 diagnostics: vec![
1043 lsp::Diagnostic {
1044 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1045 severity: Some(DiagnosticSeverity::ERROR),
1046 message: "undefined variable 'A'".to_string(),
1047 source: Some("disk".to_string()),
1048 ..Default::default()
1049 },
1050 lsp::Diagnostic {
1051 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1052 severity: Some(DiagnosticSeverity::WARNING),
1053 message: "unreachable statement".to_string(),
1054 source: Some("disk".to_string()),
1055 ..Default::default()
1056 },
1057 ],
1058 });
1059
1060 buffer.next_notification(cx).await;
1061 buffer.read_with(cx, |buffer, _| {
1062 assert_eq!(
1063 buffer
1064 .snapshot()
1065 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1066 .collect::<Vec<_>>(),
1067 &[
1068 DiagnosticEntry {
1069 range: Point::new(2, 9)..Point::new(2, 12),
1070 diagnostic: Diagnostic {
1071 severity: DiagnosticSeverity::WARNING,
1072 message: "unreachable statement".to_string(),
1073 is_disk_based: true,
1074 group_id: 4,
1075 is_primary: true,
1076 ..Default::default()
1077 }
1078 },
1079 DiagnosticEntry {
1080 range: Point::new(2, 9)..Point::new(2, 10),
1081 diagnostic: Diagnostic {
1082 severity: DiagnosticSeverity::ERROR,
1083 message: "undefined variable 'A'".to_string(),
1084 is_disk_based: true,
1085 group_id: 3,
1086 is_primary: true,
1087 ..Default::default()
1088 },
1089 }
1090 ]
1091 );
1092 assert_eq!(
1093 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1094 [
1095 ("fn a() { ".to_string(), None),
1096 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1097 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1098 ("\n".to_string(), None),
1099 ]
1100 );
1101 assert_eq!(
1102 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1103 [
1104 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1105 ("\n".to_string(), None),
1106 ]
1107 );
1108 });
1109
1110 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1111 // changes since the last save.
1112 buffer.update(cx, |buffer, cx| {
1113 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
1114 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
1115 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
1116 });
1117 let change_notification_2 = fake_server
1118 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1119 .await;
1120 assert!(
1121 change_notification_2.text_document.version > change_notification_1.text_document.version
1122 );
1123
1124 // Handle out-of-order diagnostics
1125 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1126 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1127 version: Some(change_notification_2.text_document.version),
1128 diagnostics: vec![
1129 lsp::Diagnostic {
1130 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1131 severity: Some(DiagnosticSeverity::ERROR),
1132 message: "undefined variable 'BB'".to_string(),
1133 source: Some("disk".to_string()),
1134 ..Default::default()
1135 },
1136 lsp::Diagnostic {
1137 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1138 severity: Some(DiagnosticSeverity::WARNING),
1139 message: "undefined variable 'A'".to_string(),
1140 source: Some("disk".to_string()),
1141 ..Default::default()
1142 },
1143 ],
1144 });
1145
1146 buffer.next_notification(cx).await;
1147 buffer.read_with(cx, |buffer, _| {
1148 assert_eq!(
1149 buffer
1150 .snapshot()
1151 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1152 .collect::<Vec<_>>(),
1153 &[
1154 DiagnosticEntry {
1155 range: Point::new(2, 21)..Point::new(2, 22),
1156 diagnostic: Diagnostic {
1157 severity: DiagnosticSeverity::WARNING,
1158 message: "undefined variable 'A'".to_string(),
1159 is_disk_based: true,
1160 group_id: 6,
1161 is_primary: true,
1162 ..Default::default()
1163 }
1164 },
1165 DiagnosticEntry {
1166 range: Point::new(3, 9)..Point::new(3, 14),
1167 diagnostic: Diagnostic {
1168 severity: DiagnosticSeverity::ERROR,
1169 message: "undefined variable 'BB'".to_string(),
1170 is_disk_based: true,
1171 group_id: 5,
1172 is_primary: true,
1173 ..Default::default()
1174 },
1175 }
1176 ]
1177 );
1178 });
1179}
1180
1181#[gpui::test]
1182async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1183 cx.foreground().forbid_parking();
1184
1185 let text = concat!(
1186 "let one = ;\n", //
1187 "let two = \n",
1188 "let three = 3;\n",
1189 );
1190
1191 let fs = FakeFs::new(cx.background());
1192 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1193
1194 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1195 let buffer = project
1196 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1197 .await
1198 .unwrap();
1199
1200 project.update(cx, |project, cx| {
1201 project
1202 .update_buffer_diagnostics(
1203 &buffer,
1204 vec![
1205 DiagnosticEntry {
1206 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
1207 diagnostic: Diagnostic {
1208 severity: DiagnosticSeverity::ERROR,
1209 message: "syntax error 1".to_string(),
1210 ..Default::default()
1211 },
1212 },
1213 DiagnosticEntry {
1214 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
1215 diagnostic: Diagnostic {
1216 severity: DiagnosticSeverity::ERROR,
1217 message: "syntax error 2".to_string(),
1218 ..Default::default()
1219 },
1220 },
1221 ],
1222 None,
1223 cx,
1224 )
1225 .unwrap();
1226 });
1227
1228 // An empty range is extended forward to include the following character.
1229 // At the end of a line, an empty range is extended backward to include
1230 // the preceding character.
1231 buffer.read_with(cx, |buffer, _| {
1232 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
1233 assert_eq!(
1234 chunks
1235 .iter()
1236 .map(|(s, d)| (s.as_str(), *d))
1237 .collect::<Vec<_>>(),
1238 &[
1239 ("let one = ", None),
1240 (";", Some(DiagnosticSeverity::ERROR)),
1241 ("\nlet two =", None),
1242 (" ", Some(DiagnosticSeverity::ERROR)),
1243 ("\nlet three = 3;\n", None)
1244 ]
1245 );
1246 });
1247}
1248
1249#[gpui::test]
1250async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1251 cx.foreground().forbid_parking();
1252
1253 let mut language = Language::new(
1254 LanguageConfig {
1255 name: "Rust".into(),
1256 path_suffixes: vec!["rs".to_string()],
1257 ..Default::default()
1258 },
1259 Some(tree_sitter_rust::language()),
1260 );
1261 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
1262
1263 let text = "
1264 fn a() {
1265 f1();
1266 }
1267 fn b() {
1268 f2();
1269 }
1270 fn c() {
1271 f3();
1272 }
1273 "
1274 .unindent();
1275
1276 let fs = FakeFs::new(cx.background());
1277 fs.insert_tree(
1278 "/dir",
1279 json!({
1280 "a.rs": text.clone(),
1281 }),
1282 )
1283 .await;
1284
1285 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1286 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1287 let buffer = project
1288 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1289 .await
1290 .unwrap();
1291
1292 let mut fake_server = fake_servers.next().await.unwrap();
1293 let lsp_document_version = fake_server
1294 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1295 .await
1296 .text_document
1297 .version;
1298
1299 // Simulate editing the buffer after the language server computes some edits.
1300 buffer.update(cx, |buffer, cx| {
1301 buffer.edit(
1302 [(
1303 Point::new(0, 0)..Point::new(0, 0),
1304 "// above first function\n",
1305 )],
1306 cx,
1307 );
1308 buffer.edit(
1309 [(
1310 Point::new(2, 0)..Point::new(2, 0),
1311 " // inside first function\n",
1312 )],
1313 cx,
1314 );
1315 buffer.edit(
1316 [(
1317 Point::new(6, 4)..Point::new(6, 4),
1318 "// inside second function ",
1319 )],
1320 cx,
1321 );
1322
1323 assert_eq!(
1324 buffer.text(),
1325 "
1326 // above first function
1327 fn a() {
1328 // inside first function
1329 f1();
1330 }
1331 fn b() {
1332 // inside second function f2();
1333 }
1334 fn c() {
1335 f3();
1336 }
1337 "
1338 .unindent()
1339 );
1340 });
1341
1342 let edits = project
1343 .update(cx, |project, cx| {
1344 project.edits_from_lsp(
1345 &buffer,
1346 vec![
1347 // replace body of first function
1348 lsp::TextEdit {
1349 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1350 new_text: "
1351 fn a() {
1352 f10();
1353 }
1354 "
1355 .unindent(),
1356 },
1357 // edit inside second function
1358 lsp::TextEdit {
1359 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1360 new_text: "00".into(),
1361 },
1362 // edit inside third function via two distinct edits
1363 lsp::TextEdit {
1364 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1365 new_text: "4000".into(),
1366 },
1367 lsp::TextEdit {
1368 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1369 new_text: "".into(),
1370 },
1371 ],
1372 Some(lsp_document_version),
1373 cx,
1374 )
1375 })
1376 .await
1377 .unwrap();
1378
1379 buffer.update(cx, |buffer, cx| {
1380 for (range, new_text) in edits {
1381 buffer.edit([(range, new_text)], cx);
1382 }
1383 assert_eq!(
1384 buffer.text(),
1385 "
1386 // above first function
1387 fn a() {
1388 // inside first function
1389 f10();
1390 }
1391 fn b() {
1392 // inside second function f200();
1393 }
1394 fn c() {
1395 f4000();
1396 }
1397 "
1398 .unindent()
1399 );
1400 });
1401}
1402
1403#[gpui::test]
1404async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1405 cx.foreground().forbid_parking();
1406
1407 let text = "
1408 use a::b;
1409 use a::c;
1410
1411 fn f() {
1412 b();
1413 c();
1414 }
1415 "
1416 .unindent();
1417
1418 let fs = FakeFs::new(cx.background());
1419 fs.insert_tree(
1420 "/dir",
1421 json!({
1422 "a.rs": text.clone(),
1423 }),
1424 )
1425 .await;
1426
1427 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1428 let buffer = project
1429 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1430 .await
1431 .unwrap();
1432
1433 // Simulate the language server sending us a small edit in the form of a very large diff.
1434 // Rust-analyzer does this when performing a merge-imports code action.
1435 let edits = project
1436 .update(cx, |project, cx| {
1437 project.edits_from_lsp(
1438 &buffer,
1439 [
1440 // Replace the first use statement without editing the semicolon.
1441 lsp::TextEdit {
1442 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1443 new_text: "a::{b, c}".into(),
1444 },
1445 // Reinsert the remainder of the file between the semicolon and the final
1446 // newline of the file.
1447 lsp::TextEdit {
1448 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1449 new_text: "\n\n".into(),
1450 },
1451 lsp::TextEdit {
1452 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1453 new_text: "
1454 fn f() {
1455 b();
1456 c();
1457 }"
1458 .unindent(),
1459 },
1460 // Delete everything after the first newline of the file.
1461 lsp::TextEdit {
1462 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1463 new_text: "".into(),
1464 },
1465 ],
1466 None,
1467 cx,
1468 )
1469 })
1470 .await
1471 .unwrap();
1472
1473 buffer.update(cx, |buffer, cx| {
1474 let edits = edits
1475 .into_iter()
1476 .map(|(range, text)| {
1477 (
1478 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1479 text,
1480 )
1481 })
1482 .collect::<Vec<_>>();
1483
1484 assert_eq!(
1485 edits,
1486 [
1487 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1488 (Point::new(1, 0)..Point::new(2, 0), "".into())
1489 ]
1490 );
1491
1492 for (range, new_text) in edits {
1493 buffer.edit([(range, new_text)], cx);
1494 }
1495 assert_eq!(
1496 buffer.text(),
1497 "
1498 use a::{b, c};
1499
1500 fn f() {
1501 b();
1502 c();
1503 }
1504 "
1505 .unindent()
1506 );
1507 });
1508}
1509
1510#[gpui::test]
1511async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1512 cx.foreground().forbid_parking();
1513
1514 let text = "
1515 use a::b;
1516 use a::c;
1517
1518 fn f() {
1519 b();
1520 c();
1521 }
1522 "
1523 .unindent();
1524
1525 let fs = FakeFs::new(cx.background());
1526 fs.insert_tree(
1527 "/dir",
1528 json!({
1529 "a.rs": text.clone(),
1530 }),
1531 )
1532 .await;
1533
1534 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1535 let buffer = project
1536 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1537 .await
1538 .unwrap();
1539
1540 // Simulate the language server sending us edits in a non-ordered fashion,
1541 // with ranges sometimes being inverted.
1542 let edits = project
1543 .update(cx, |project, cx| {
1544 project.edits_from_lsp(
1545 &buffer,
1546 [
1547 lsp::TextEdit {
1548 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1549 new_text: "\n\n".into(),
1550 },
1551 lsp::TextEdit {
1552 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1553 new_text: "a::{b, c}".into(),
1554 },
1555 lsp::TextEdit {
1556 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1557 new_text: "".into(),
1558 },
1559 lsp::TextEdit {
1560 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1561 new_text: "
1562 fn f() {
1563 b();
1564 c();
1565 }"
1566 .unindent(),
1567 },
1568 ],
1569 None,
1570 cx,
1571 )
1572 })
1573 .await
1574 .unwrap();
1575
1576 buffer.update(cx, |buffer, cx| {
1577 let edits = edits
1578 .into_iter()
1579 .map(|(range, text)| {
1580 (
1581 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1582 text,
1583 )
1584 })
1585 .collect::<Vec<_>>();
1586
1587 assert_eq!(
1588 edits,
1589 [
1590 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1591 (Point::new(1, 0)..Point::new(2, 0), "".into())
1592 ]
1593 );
1594
1595 for (range, new_text) in edits {
1596 buffer.edit([(range, new_text)], cx);
1597 }
1598 assert_eq!(
1599 buffer.text(),
1600 "
1601 use a::{b, c};
1602
1603 fn f() {
1604 b();
1605 c();
1606 }
1607 "
1608 .unindent()
1609 );
1610 });
1611}
1612
1613fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1614 buffer: &Buffer,
1615 range: Range<T>,
1616) -> Vec<(String, Option<DiagnosticSeverity>)> {
1617 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1618 for chunk in buffer.snapshot().chunks(range, true) {
1619 if chunks.last().map_or(false, |prev_chunk| {
1620 prev_chunk.1 == chunk.diagnostic_severity
1621 }) {
1622 chunks.last_mut().unwrap().0.push_str(chunk.text);
1623 } else {
1624 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1625 }
1626 }
1627 chunks
1628}
1629
1630#[gpui::test(iterations = 10)]
1631async fn test_definition(cx: &mut gpui::TestAppContext) {
1632 let mut language = Language::new(
1633 LanguageConfig {
1634 name: "Rust".into(),
1635 path_suffixes: vec!["rs".to_string()],
1636 ..Default::default()
1637 },
1638 Some(tree_sitter_rust::language()),
1639 );
1640 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
1641
1642 let fs = FakeFs::new(cx.background());
1643 fs.insert_tree(
1644 "/dir",
1645 json!({
1646 "a.rs": "const fn a() { A }",
1647 "b.rs": "const y: i32 = crate::a()",
1648 }),
1649 )
1650 .await;
1651
1652 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1653 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1654
1655 let buffer = project
1656 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1657 .await
1658 .unwrap();
1659
1660 let fake_server = fake_servers.next().await.unwrap();
1661 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1662 let params = params.text_document_position_params;
1663 assert_eq!(
1664 params.text_document.uri.to_file_path().unwrap(),
1665 Path::new("/dir/b.rs"),
1666 );
1667 assert_eq!(params.position, lsp::Position::new(0, 22));
1668
1669 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1670 lsp::Location::new(
1671 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1672 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1673 ),
1674 )))
1675 });
1676
1677 let mut definitions = project
1678 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1679 .await
1680 .unwrap();
1681
1682 // Assert no new language server started
1683 cx.foreground().run_until_parked();
1684 assert!(fake_servers.try_next().is_err());
1685
1686 assert_eq!(definitions.len(), 1);
1687 let definition = definitions.pop().unwrap();
1688 cx.update(|cx| {
1689 let target_buffer = definition.target.buffer.read(cx);
1690 assert_eq!(
1691 target_buffer
1692 .file()
1693 .unwrap()
1694 .as_local()
1695 .unwrap()
1696 .abs_path(cx),
1697 Path::new("/dir/a.rs"),
1698 );
1699 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1700 assert_eq!(
1701 list_worktrees(&project, cx),
1702 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1703 );
1704
1705 drop(definition);
1706 });
1707 cx.read(|cx| {
1708 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1709 });
1710
1711 fn list_worktrees<'a>(
1712 project: &'a ModelHandle<Project>,
1713 cx: &'a AppContext,
1714 ) -> Vec<(&'a Path, bool)> {
1715 project
1716 .read(cx)
1717 .worktrees(cx)
1718 .map(|worktree| {
1719 let worktree = worktree.read(cx);
1720 (
1721 worktree.as_local().unwrap().abs_path().as_ref(),
1722 worktree.is_visible(),
1723 )
1724 })
1725 .collect::<Vec<_>>()
1726 }
1727}
1728
1729#[gpui::test]
1730async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1731 let mut language = Language::new(
1732 LanguageConfig {
1733 name: "TypeScript".into(),
1734 path_suffixes: vec!["ts".to_string()],
1735 ..Default::default()
1736 },
1737 Some(tree_sitter_typescript::language_typescript()),
1738 );
1739 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
1740
1741 let fs = FakeFs::new(cx.background());
1742 fs.insert_tree(
1743 "/dir",
1744 json!({
1745 "a.ts": "",
1746 }),
1747 )
1748 .await;
1749
1750 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1751 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1752 let buffer = project
1753 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1754 .await
1755 .unwrap();
1756
1757 let fake_server = fake_language_servers.next().await.unwrap();
1758
1759 let text = "let a = b.fqn";
1760 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1761 let completions = project.update(cx, |project, cx| {
1762 project.completions(&buffer, text.len(), cx)
1763 });
1764
1765 fake_server
1766 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1767 Ok(Some(lsp::CompletionResponse::Array(vec![
1768 lsp::CompletionItem {
1769 label: "fullyQualifiedName?".into(),
1770 insert_text: Some("fullyQualifiedName".into()),
1771 ..Default::default()
1772 },
1773 ])))
1774 })
1775 .next()
1776 .await;
1777 let completions = completions.await.unwrap();
1778 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1779 assert_eq!(completions.len(), 1);
1780 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1781 assert_eq!(
1782 completions[0].old_range.to_offset(&snapshot),
1783 text.len() - 3..text.len()
1784 );
1785
1786 let text = "let a = \"atoms/cmp\"";
1787 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1788 let completions = project.update(cx, |project, cx| {
1789 project.completions(&buffer, text.len() - 1, cx)
1790 });
1791
1792 fake_server
1793 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1794 Ok(Some(lsp::CompletionResponse::Array(vec![
1795 lsp::CompletionItem {
1796 label: "component".into(),
1797 ..Default::default()
1798 },
1799 ])))
1800 })
1801 .next()
1802 .await;
1803 let completions = completions.await.unwrap();
1804 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1805 assert_eq!(completions.len(), 1);
1806 assert_eq!(completions[0].new_text, "component");
1807 assert_eq!(
1808 completions[0].old_range.to_offset(&snapshot),
1809 text.len() - 4..text.len() - 1
1810 );
1811}
1812
1813#[gpui::test]
1814async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1815 let mut language = Language::new(
1816 LanguageConfig {
1817 name: "TypeScript".into(),
1818 path_suffixes: vec!["ts".to_string()],
1819 ..Default::default()
1820 },
1821 Some(tree_sitter_typescript::language_typescript()),
1822 );
1823 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
1824
1825 let fs = FakeFs::new(cx.background());
1826 fs.insert_tree(
1827 "/dir",
1828 json!({
1829 "a.ts": "",
1830 }),
1831 )
1832 .await;
1833
1834 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1835 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1836 let buffer = project
1837 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1838 .await
1839 .unwrap();
1840
1841 let fake_server = fake_language_servers.next().await.unwrap();
1842
1843 let text = "let a = b.fqn";
1844 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1845 let completions = project.update(cx, |project, cx| {
1846 project.completions(&buffer, text.len(), cx)
1847 });
1848
1849 fake_server
1850 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1851 Ok(Some(lsp::CompletionResponse::Array(vec![
1852 lsp::CompletionItem {
1853 label: "fullyQualifiedName?".into(),
1854 insert_text: Some("fully\rQualified\r\nName".into()),
1855 ..Default::default()
1856 },
1857 ])))
1858 })
1859 .next()
1860 .await;
1861 let completions = completions.await.unwrap();
1862 assert_eq!(completions.len(), 1);
1863 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1864}
1865
1866#[gpui::test(iterations = 10)]
1867async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1868 let mut language = Language::new(
1869 LanguageConfig {
1870 name: "TypeScript".into(),
1871 path_suffixes: vec!["ts".to_string()],
1872 ..Default::default()
1873 },
1874 None,
1875 );
1876 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
1877
1878 let fs = FakeFs::new(cx.background());
1879 fs.insert_tree(
1880 "/dir",
1881 json!({
1882 "a.ts": "a",
1883 }),
1884 )
1885 .await;
1886
1887 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1888 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1889 let buffer = project
1890 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1891 .await
1892 .unwrap();
1893
1894 let fake_server = fake_language_servers.next().await.unwrap();
1895
1896 // Language server returns code actions that contain commands, and not edits.
1897 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1898 fake_server
1899 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1900 Ok(Some(vec![
1901 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1902 title: "The code action".into(),
1903 command: Some(lsp::Command {
1904 title: "The command".into(),
1905 command: "_the/command".into(),
1906 arguments: Some(vec![json!("the-argument")]),
1907 }),
1908 ..Default::default()
1909 }),
1910 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1911 title: "two".into(),
1912 ..Default::default()
1913 }),
1914 ]))
1915 })
1916 .next()
1917 .await;
1918
1919 let action = actions.await.unwrap()[0].clone();
1920 let apply = project.update(cx, |project, cx| {
1921 project.apply_code_action(buffer.clone(), action, true, cx)
1922 });
1923
1924 // Resolving the code action does not populate its edits. In absence of
1925 // edits, we must execute the given command.
1926 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
1927 |action, _| async move { Ok(action) },
1928 );
1929
1930 // While executing the command, the language server sends the editor
1931 // a `workspaceEdit` request.
1932 fake_server
1933 .handle_request::<lsp::request::ExecuteCommand, _, _>({
1934 let fake = fake_server.clone();
1935 move |params, _| {
1936 assert_eq!(params.command, "_the/command");
1937 let fake = fake.clone();
1938 async move {
1939 fake.server
1940 .request::<lsp::request::ApplyWorkspaceEdit>(
1941 lsp::ApplyWorkspaceEditParams {
1942 label: None,
1943 edit: lsp::WorkspaceEdit {
1944 changes: Some(
1945 [(
1946 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
1947 vec![lsp::TextEdit {
1948 range: lsp::Range::new(
1949 lsp::Position::new(0, 0),
1950 lsp::Position::new(0, 0),
1951 ),
1952 new_text: "X".into(),
1953 }],
1954 )]
1955 .into_iter()
1956 .collect(),
1957 ),
1958 ..Default::default()
1959 },
1960 },
1961 )
1962 .await
1963 .unwrap();
1964 Ok(Some(json!(null)))
1965 }
1966 }
1967 })
1968 .next()
1969 .await;
1970
1971 // Applying the code action returns a project transaction containing the edits
1972 // sent by the language server in its `workspaceEdit` request.
1973 let transaction = apply.await.unwrap();
1974 assert!(transaction.0.contains_key(&buffer));
1975 buffer.update(cx, |buffer, cx| {
1976 assert_eq!(buffer.text(), "Xa");
1977 buffer.undo(cx);
1978 assert_eq!(buffer.text(), "a");
1979 });
1980}
1981
1982#[gpui::test]
1983async fn test_save_file(cx: &mut gpui::TestAppContext) {
1984 let fs = FakeFs::new(cx.background());
1985 fs.insert_tree(
1986 "/dir",
1987 json!({
1988 "file1": "the old contents",
1989 }),
1990 )
1991 .await;
1992
1993 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
1994 let buffer = project
1995 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
1996 .await
1997 .unwrap();
1998 buffer
1999 .update(cx, |buffer, cx| {
2000 assert_eq!(buffer.text(), "the old contents");
2001 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2002 buffer.save(cx)
2003 })
2004 .await
2005 .unwrap();
2006
2007 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2008 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2009}
2010
2011#[gpui::test]
2012async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2013 let fs = FakeFs::new(cx.background());
2014 fs.insert_tree(
2015 "/dir",
2016 json!({
2017 "file1": "the old contents",
2018 }),
2019 )
2020 .await;
2021
2022 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2023 let buffer = project
2024 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2025 .await
2026 .unwrap();
2027 buffer
2028 .update(cx, |buffer, cx| {
2029 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2030 buffer.save(cx)
2031 })
2032 .await
2033 .unwrap();
2034
2035 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2036 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2037}
2038
2039#[gpui::test]
2040async fn test_save_as(cx: &mut gpui::TestAppContext) {
2041 let fs = FakeFs::new(cx.background());
2042 fs.insert_tree("/dir", json!({})).await;
2043
2044 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2045 let buffer = project.update(cx, |project, cx| {
2046 project.create_buffer("", None, cx).unwrap()
2047 });
2048 buffer.update(cx, |buffer, cx| {
2049 buffer.edit([(0..0, "abc")], cx);
2050 assert!(buffer.is_dirty());
2051 assert!(!buffer.has_conflict());
2052 });
2053 project
2054 .update(cx, |project, cx| {
2055 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2056 })
2057 .await
2058 .unwrap();
2059 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2060 buffer.read_with(cx, |buffer, cx| {
2061 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2062 assert!(!buffer.is_dirty());
2063 assert!(!buffer.has_conflict());
2064 });
2065
2066 let opened_buffer = project
2067 .update(cx, |project, cx| {
2068 project.open_local_buffer("/dir/file1", cx)
2069 })
2070 .await
2071 .unwrap();
2072 assert_eq!(opened_buffer, buffer);
2073}
2074
2075#[gpui::test(retries = 5)]
2076async fn test_rescan_and_remote_updates(
2077 deterministic: Arc<Deterministic>,
2078 cx: &mut gpui::TestAppContext,
2079) {
2080 let dir = temp_tree(json!({
2081 "a": {
2082 "file1": "",
2083 "file2": "",
2084 "file3": "",
2085 },
2086 "b": {
2087 "c": {
2088 "file4": "",
2089 "file5": "",
2090 }
2091 }
2092 }));
2093
2094 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2095 let rpc = project.read_with(cx, |p, _| p.client.clone());
2096
2097 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2098 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2099 async move { buffer.await.unwrap() }
2100 };
2101 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2102 project.read_with(cx, |project, cx| {
2103 let tree = project.worktrees(cx).next().unwrap();
2104 tree.read(cx)
2105 .entry_for_path(path)
2106 .expect(&format!("no entry for path {}", path))
2107 .id
2108 })
2109 };
2110
2111 let buffer2 = buffer_for_path("a/file2", cx).await;
2112 let buffer3 = buffer_for_path("a/file3", cx).await;
2113 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2114 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2115
2116 let file2_id = id_for_path("a/file2", &cx);
2117 let file3_id = id_for_path("a/file3", &cx);
2118 let file4_id = id_for_path("b/c/file4", &cx);
2119
2120 // Create a remote copy of this worktree.
2121 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2122 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2123 let remote = cx.update(|cx| {
2124 Worktree::remote(
2125 1,
2126 1,
2127 proto::WorktreeMetadata {
2128 id: initial_snapshot.id().to_proto(),
2129 root_name: initial_snapshot.root_name().into(),
2130 visible: true,
2131 },
2132 rpc.clone(),
2133 cx,
2134 )
2135 });
2136 remote.update(cx, |remote, _| {
2137 let update = initial_snapshot.build_initial_update(1);
2138 remote.as_remote_mut().unwrap().update_from_remote(update);
2139 });
2140 deterministic.run_until_parked();
2141
2142 cx.read(|cx| {
2143 assert!(!buffer2.read(cx).is_dirty());
2144 assert!(!buffer3.read(cx).is_dirty());
2145 assert!(!buffer4.read(cx).is_dirty());
2146 assert!(!buffer5.read(cx).is_dirty());
2147 });
2148
2149 // Rename and delete files and directories.
2150 tree.flush_fs_events(&cx).await;
2151 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2152 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2153 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2154 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2155 tree.flush_fs_events(&cx).await;
2156
2157 let expected_paths = vec![
2158 "a",
2159 "a/file1",
2160 "a/file2.new",
2161 "b",
2162 "d",
2163 "d/file3",
2164 "d/file4",
2165 ];
2166
2167 cx.read(|app| {
2168 assert_eq!(
2169 tree.read(app)
2170 .paths()
2171 .map(|p| p.to_str().unwrap())
2172 .collect::<Vec<_>>(),
2173 expected_paths
2174 );
2175
2176 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
2177 assert_eq!(id_for_path("d/file3", &cx), file3_id);
2178 assert_eq!(id_for_path("d/file4", &cx), file4_id);
2179
2180 assert_eq!(
2181 buffer2.read(app).file().unwrap().path().as_ref(),
2182 Path::new("a/file2.new")
2183 );
2184 assert_eq!(
2185 buffer3.read(app).file().unwrap().path().as_ref(),
2186 Path::new("d/file3")
2187 );
2188 assert_eq!(
2189 buffer4.read(app).file().unwrap().path().as_ref(),
2190 Path::new("d/file4")
2191 );
2192 assert_eq!(
2193 buffer5.read(app).file().unwrap().path().as_ref(),
2194 Path::new("b/c/file5")
2195 );
2196
2197 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2198 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2199 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2200 assert!(buffer5.read(app).file().unwrap().is_deleted());
2201 });
2202
2203 // Update the remote worktree. Check that it becomes consistent with the
2204 // local worktree.
2205 remote.update(cx, |remote, cx| {
2206 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2207 &initial_snapshot,
2208 1,
2209 1,
2210 true,
2211 );
2212 remote.as_remote_mut().unwrap().update_from_remote(update);
2213 });
2214 deterministic.run_until_parked();
2215 remote.read_with(cx, |remote, _| {
2216 assert_eq!(
2217 remote
2218 .paths()
2219 .map(|p| p.to_str().unwrap())
2220 .collect::<Vec<_>>(),
2221 expected_paths
2222 );
2223 });
2224}
2225
2226#[gpui::test]
2227async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2228 let fs = FakeFs::new(cx.background());
2229 fs.insert_tree(
2230 "/dir",
2231 json!({
2232 "a.txt": "a-contents",
2233 "b.txt": "b-contents",
2234 }),
2235 )
2236 .await;
2237
2238 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2239
2240 // Spawn multiple tasks to open paths, repeating some paths.
2241 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2242 (
2243 p.open_local_buffer("/dir/a.txt", cx),
2244 p.open_local_buffer("/dir/b.txt", cx),
2245 p.open_local_buffer("/dir/a.txt", cx),
2246 )
2247 });
2248
2249 let buffer_a_1 = buffer_a_1.await.unwrap();
2250 let buffer_a_2 = buffer_a_2.await.unwrap();
2251 let buffer_b = buffer_b.await.unwrap();
2252 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2253 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2254
2255 // There is only one buffer per path.
2256 let buffer_a_id = buffer_a_1.id();
2257 assert_eq!(buffer_a_2.id(), buffer_a_id);
2258
2259 // Open the same path again while it is still open.
2260 drop(buffer_a_1);
2261 let buffer_a_3 = project
2262 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2263 .await
2264 .unwrap();
2265
2266 // There's still only one buffer per path.
2267 assert_eq!(buffer_a_3.id(), buffer_a_id);
2268}
2269
2270#[gpui::test]
2271async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2272 let fs = FakeFs::new(cx.background());
2273 fs.insert_tree(
2274 "/dir",
2275 json!({
2276 "file1": "abc",
2277 "file2": "def",
2278 "file3": "ghi",
2279 }),
2280 )
2281 .await;
2282
2283 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2284
2285 let buffer1 = project
2286 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2287 .await
2288 .unwrap();
2289 let events = Rc::new(RefCell::new(Vec::new()));
2290
2291 // initially, the buffer isn't dirty.
2292 buffer1.update(cx, |buffer, cx| {
2293 cx.subscribe(&buffer1, {
2294 let events = events.clone();
2295 move |_, _, event, _| match event {
2296 BufferEvent::Operation(_) => {}
2297 _ => events.borrow_mut().push(event.clone()),
2298 }
2299 })
2300 .detach();
2301
2302 assert!(!buffer.is_dirty());
2303 assert!(events.borrow().is_empty());
2304
2305 buffer.edit([(1..2, "")], cx);
2306 });
2307
2308 // after the first edit, the buffer is dirty, and emits a dirtied event.
2309 buffer1.update(cx, |buffer, cx| {
2310 assert!(buffer.text() == "ac");
2311 assert!(buffer.is_dirty());
2312 assert_eq!(
2313 *events.borrow(),
2314 &[language::Event::Edited, language::Event::DirtyChanged]
2315 );
2316 events.borrow_mut().clear();
2317 buffer.did_save(
2318 buffer.version(),
2319 buffer.as_rope().fingerprint(),
2320 buffer.file().unwrap().mtime(),
2321 None,
2322 cx,
2323 );
2324 });
2325
2326 // after saving, the buffer is not dirty, and emits a saved event.
2327 buffer1.update(cx, |buffer, cx| {
2328 assert!(!buffer.is_dirty());
2329 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2330 events.borrow_mut().clear();
2331
2332 buffer.edit([(1..1, "B")], cx);
2333 buffer.edit([(2..2, "D")], cx);
2334 });
2335
2336 // after editing again, the buffer is dirty, and emits another dirty event.
2337 buffer1.update(cx, |buffer, cx| {
2338 assert!(buffer.text() == "aBDc");
2339 assert!(buffer.is_dirty());
2340 assert_eq!(
2341 *events.borrow(),
2342 &[
2343 language::Event::Edited,
2344 language::Event::DirtyChanged,
2345 language::Event::Edited,
2346 ],
2347 );
2348 events.borrow_mut().clear();
2349
2350 // After restoring the buffer to its previously-saved state,
2351 // the buffer is not considered dirty anymore.
2352 buffer.edit([(1..3, "")], cx);
2353 assert!(buffer.text() == "ac");
2354 assert!(!buffer.is_dirty());
2355 });
2356
2357 assert_eq!(
2358 *events.borrow(),
2359 &[language::Event::Edited, language::Event::DirtyChanged]
2360 );
2361
2362 // When a file is deleted, the buffer is considered dirty.
2363 let events = Rc::new(RefCell::new(Vec::new()));
2364 let buffer2 = project
2365 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2366 .await
2367 .unwrap();
2368 buffer2.update(cx, |_, cx| {
2369 cx.subscribe(&buffer2, {
2370 let events = events.clone();
2371 move |_, _, event, _| events.borrow_mut().push(event.clone())
2372 })
2373 .detach();
2374 });
2375
2376 fs.remove_file("/dir/file2".as_ref(), Default::default())
2377 .await
2378 .unwrap();
2379 cx.foreground().run_until_parked();
2380 assert_eq!(
2381 *events.borrow(),
2382 &[
2383 language::Event::DirtyChanged,
2384 language::Event::FileHandleChanged
2385 ]
2386 );
2387
2388 // When a file is already dirty when deleted, we don't emit a Dirtied event.
2389 let events = Rc::new(RefCell::new(Vec::new()));
2390 let buffer3 = project
2391 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2392 .await
2393 .unwrap();
2394 buffer3.update(cx, |_, cx| {
2395 cx.subscribe(&buffer3, {
2396 let events = events.clone();
2397 move |_, _, event, _| events.borrow_mut().push(event.clone())
2398 })
2399 .detach();
2400 });
2401
2402 buffer3.update(cx, |buffer, cx| {
2403 buffer.edit([(0..0, "x")], cx);
2404 });
2405 events.borrow_mut().clear();
2406 fs.remove_file("/dir/file3".as_ref(), Default::default())
2407 .await
2408 .unwrap();
2409 cx.foreground().run_until_parked();
2410 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2411 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2412}
2413
2414#[gpui::test]
2415async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2416 let initial_contents = "aaa\nbbbbb\nc\n";
2417 let fs = FakeFs::new(cx.background());
2418 fs.insert_tree(
2419 "/dir",
2420 json!({
2421 "the-file": initial_contents,
2422 }),
2423 )
2424 .await;
2425 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2426 let buffer = project
2427 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2428 .await
2429 .unwrap();
2430
2431 let anchors = (0..3)
2432 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2433 .collect::<Vec<_>>();
2434
2435 // Change the file on disk, adding two new lines of text, and removing
2436 // one line.
2437 buffer.read_with(cx, |buffer, _| {
2438 assert!(!buffer.is_dirty());
2439 assert!(!buffer.has_conflict());
2440 });
2441 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2442 fs.save(
2443 "/dir/the-file".as_ref(),
2444 &new_contents.into(),
2445 LineEnding::Unix,
2446 )
2447 .await
2448 .unwrap();
2449
2450 // Because the buffer was not modified, it is reloaded from disk. Its
2451 // contents are edited according to the diff between the old and new
2452 // file contents.
2453 cx.foreground().run_until_parked();
2454 buffer.update(cx, |buffer, _| {
2455 assert_eq!(buffer.text(), new_contents);
2456 assert!(!buffer.is_dirty());
2457 assert!(!buffer.has_conflict());
2458
2459 let anchor_positions = anchors
2460 .iter()
2461 .map(|anchor| anchor.to_point(&*buffer))
2462 .collect::<Vec<_>>();
2463 assert_eq!(
2464 anchor_positions,
2465 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
2466 );
2467 });
2468
2469 // Modify the buffer
2470 buffer.update(cx, |buffer, cx| {
2471 buffer.edit([(0..0, " ")], cx);
2472 assert!(buffer.is_dirty());
2473 assert!(!buffer.has_conflict());
2474 });
2475
2476 // Change the file on disk again, adding blank lines to the beginning.
2477 fs.save(
2478 "/dir/the-file".as_ref(),
2479 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2480 LineEnding::Unix,
2481 )
2482 .await
2483 .unwrap();
2484
2485 // Because the buffer is modified, it doesn't reload from disk, but is
2486 // marked as having a conflict.
2487 cx.foreground().run_until_parked();
2488 buffer.read_with(cx, |buffer, _| {
2489 assert!(buffer.has_conflict());
2490 });
2491}
2492
2493#[gpui::test]
2494async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2495 let fs = FakeFs::new(cx.background());
2496 fs.insert_tree(
2497 "/dir",
2498 json!({
2499 "file1": "a\nb\nc\n",
2500 "file2": "one\r\ntwo\r\nthree\r\n",
2501 }),
2502 )
2503 .await;
2504
2505 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2506 let buffer1 = project
2507 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2508 .await
2509 .unwrap();
2510 let buffer2 = project
2511 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2512 .await
2513 .unwrap();
2514
2515 buffer1.read_with(cx, |buffer, _| {
2516 assert_eq!(buffer.text(), "a\nb\nc\n");
2517 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2518 });
2519 buffer2.read_with(cx, |buffer, _| {
2520 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2521 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2522 });
2523
2524 // Change a file's line endings on disk from unix to windows. The buffer's
2525 // state updates correctly.
2526 fs.save(
2527 "/dir/file1".as_ref(),
2528 &"aaa\nb\nc\n".into(),
2529 LineEnding::Windows,
2530 )
2531 .await
2532 .unwrap();
2533 cx.foreground().run_until_parked();
2534 buffer1.read_with(cx, |buffer, _| {
2535 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2536 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2537 });
2538
2539 // Save a file with windows line endings. The file is written correctly.
2540 buffer2
2541 .update(cx, |buffer, cx| {
2542 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2543 buffer.save(cx)
2544 })
2545 .await
2546 .unwrap();
2547 assert_eq!(
2548 fs.load("/dir/file2".as_ref()).await.unwrap(),
2549 "one\r\ntwo\r\nthree\r\nfour\r\n",
2550 );
2551}
2552
2553#[gpui::test]
2554async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2555 cx.foreground().forbid_parking();
2556
2557 let fs = FakeFs::new(cx.background());
2558 fs.insert_tree(
2559 "/the-dir",
2560 json!({
2561 "a.rs": "
2562 fn foo(mut v: Vec<usize>) {
2563 for x in &v {
2564 v.push(1);
2565 }
2566 }
2567 "
2568 .unindent(),
2569 }),
2570 )
2571 .await;
2572
2573 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2574 let buffer = project
2575 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2576 .await
2577 .unwrap();
2578
2579 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2580 let message = lsp::PublishDiagnosticsParams {
2581 uri: buffer_uri.clone(),
2582 diagnostics: vec![
2583 lsp::Diagnostic {
2584 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2585 severity: Some(DiagnosticSeverity::WARNING),
2586 message: "error 1".to_string(),
2587 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2588 location: lsp::Location {
2589 uri: buffer_uri.clone(),
2590 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2591 },
2592 message: "error 1 hint 1".to_string(),
2593 }]),
2594 ..Default::default()
2595 },
2596 lsp::Diagnostic {
2597 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2598 severity: Some(DiagnosticSeverity::HINT),
2599 message: "error 1 hint 1".to_string(),
2600 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2601 location: lsp::Location {
2602 uri: buffer_uri.clone(),
2603 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2604 },
2605 message: "original diagnostic".to_string(),
2606 }]),
2607 ..Default::default()
2608 },
2609 lsp::Diagnostic {
2610 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2611 severity: Some(DiagnosticSeverity::ERROR),
2612 message: "error 2".to_string(),
2613 related_information: Some(vec![
2614 lsp::DiagnosticRelatedInformation {
2615 location: lsp::Location {
2616 uri: buffer_uri.clone(),
2617 range: lsp::Range::new(
2618 lsp::Position::new(1, 13),
2619 lsp::Position::new(1, 15),
2620 ),
2621 },
2622 message: "error 2 hint 1".to_string(),
2623 },
2624 lsp::DiagnosticRelatedInformation {
2625 location: lsp::Location {
2626 uri: buffer_uri.clone(),
2627 range: lsp::Range::new(
2628 lsp::Position::new(1, 13),
2629 lsp::Position::new(1, 15),
2630 ),
2631 },
2632 message: "error 2 hint 2".to_string(),
2633 },
2634 ]),
2635 ..Default::default()
2636 },
2637 lsp::Diagnostic {
2638 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2639 severity: Some(DiagnosticSeverity::HINT),
2640 message: "error 2 hint 1".to_string(),
2641 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2642 location: lsp::Location {
2643 uri: buffer_uri.clone(),
2644 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2645 },
2646 message: "original diagnostic".to_string(),
2647 }]),
2648 ..Default::default()
2649 },
2650 lsp::Diagnostic {
2651 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2652 severity: Some(DiagnosticSeverity::HINT),
2653 message: "error 2 hint 2".to_string(),
2654 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2655 location: lsp::Location {
2656 uri: buffer_uri.clone(),
2657 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2658 },
2659 message: "original diagnostic".to_string(),
2660 }]),
2661 ..Default::default()
2662 },
2663 ],
2664 version: None,
2665 };
2666
2667 project
2668 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2669 .unwrap();
2670 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2671
2672 assert_eq!(
2673 buffer
2674 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2675 .collect::<Vec<_>>(),
2676 &[
2677 DiagnosticEntry {
2678 range: Point::new(1, 8)..Point::new(1, 9),
2679 diagnostic: Diagnostic {
2680 severity: DiagnosticSeverity::WARNING,
2681 message: "error 1".to_string(),
2682 group_id: 0,
2683 is_primary: true,
2684 ..Default::default()
2685 }
2686 },
2687 DiagnosticEntry {
2688 range: Point::new(1, 8)..Point::new(1, 9),
2689 diagnostic: Diagnostic {
2690 severity: DiagnosticSeverity::HINT,
2691 message: "error 1 hint 1".to_string(),
2692 group_id: 0,
2693 is_primary: false,
2694 ..Default::default()
2695 }
2696 },
2697 DiagnosticEntry {
2698 range: Point::new(1, 13)..Point::new(1, 15),
2699 diagnostic: Diagnostic {
2700 severity: DiagnosticSeverity::HINT,
2701 message: "error 2 hint 1".to_string(),
2702 group_id: 1,
2703 is_primary: false,
2704 ..Default::default()
2705 }
2706 },
2707 DiagnosticEntry {
2708 range: Point::new(1, 13)..Point::new(1, 15),
2709 diagnostic: Diagnostic {
2710 severity: DiagnosticSeverity::HINT,
2711 message: "error 2 hint 2".to_string(),
2712 group_id: 1,
2713 is_primary: false,
2714 ..Default::default()
2715 }
2716 },
2717 DiagnosticEntry {
2718 range: Point::new(2, 8)..Point::new(2, 17),
2719 diagnostic: Diagnostic {
2720 severity: DiagnosticSeverity::ERROR,
2721 message: "error 2".to_string(),
2722 group_id: 1,
2723 is_primary: true,
2724 ..Default::default()
2725 }
2726 }
2727 ]
2728 );
2729
2730 assert_eq!(
2731 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2732 &[
2733 DiagnosticEntry {
2734 range: Point::new(1, 8)..Point::new(1, 9),
2735 diagnostic: Diagnostic {
2736 severity: DiagnosticSeverity::WARNING,
2737 message: "error 1".to_string(),
2738 group_id: 0,
2739 is_primary: true,
2740 ..Default::default()
2741 }
2742 },
2743 DiagnosticEntry {
2744 range: Point::new(1, 8)..Point::new(1, 9),
2745 diagnostic: Diagnostic {
2746 severity: DiagnosticSeverity::HINT,
2747 message: "error 1 hint 1".to_string(),
2748 group_id: 0,
2749 is_primary: false,
2750 ..Default::default()
2751 }
2752 },
2753 ]
2754 );
2755 assert_eq!(
2756 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2757 &[
2758 DiagnosticEntry {
2759 range: Point::new(1, 13)..Point::new(1, 15),
2760 diagnostic: Diagnostic {
2761 severity: DiagnosticSeverity::HINT,
2762 message: "error 2 hint 1".to_string(),
2763 group_id: 1,
2764 is_primary: false,
2765 ..Default::default()
2766 }
2767 },
2768 DiagnosticEntry {
2769 range: Point::new(1, 13)..Point::new(1, 15),
2770 diagnostic: Diagnostic {
2771 severity: DiagnosticSeverity::HINT,
2772 message: "error 2 hint 2".to_string(),
2773 group_id: 1,
2774 is_primary: false,
2775 ..Default::default()
2776 }
2777 },
2778 DiagnosticEntry {
2779 range: Point::new(2, 8)..Point::new(2, 17),
2780 diagnostic: Diagnostic {
2781 severity: DiagnosticSeverity::ERROR,
2782 message: "error 2".to_string(),
2783 group_id: 1,
2784 is_primary: true,
2785 ..Default::default()
2786 }
2787 }
2788 ]
2789 );
2790}
2791
2792#[gpui::test]
2793async fn test_rename(cx: &mut gpui::TestAppContext) {
2794 cx.foreground().forbid_parking();
2795
2796 let mut language = Language::new(
2797 LanguageConfig {
2798 name: "Rust".into(),
2799 path_suffixes: vec!["rs".to_string()],
2800 ..Default::default()
2801 },
2802 Some(tree_sitter_rust::language()),
2803 );
2804 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
2805 capabilities: lsp::ServerCapabilities {
2806 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2807 prepare_provider: Some(true),
2808 work_done_progress_options: Default::default(),
2809 })),
2810 ..Default::default()
2811 },
2812 ..Default::default()
2813 });
2814
2815 let fs = FakeFs::new(cx.background());
2816 fs.insert_tree(
2817 "/dir",
2818 json!({
2819 "one.rs": "const ONE: usize = 1;",
2820 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2821 }),
2822 )
2823 .await;
2824
2825 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2826 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2827 let buffer = project
2828 .update(cx, |project, cx| {
2829 project.open_local_buffer("/dir/one.rs", cx)
2830 })
2831 .await
2832 .unwrap();
2833
2834 let fake_server = fake_servers.next().await.unwrap();
2835
2836 let response = project.update(cx, |project, cx| {
2837 project.prepare_rename(buffer.clone(), 7, cx)
2838 });
2839 fake_server
2840 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2841 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2842 assert_eq!(params.position, lsp::Position::new(0, 7));
2843 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2844 lsp::Position::new(0, 6),
2845 lsp::Position::new(0, 9),
2846 ))))
2847 })
2848 .next()
2849 .await
2850 .unwrap();
2851 let range = response.await.unwrap().unwrap();
2852 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2853 assert_eq!(range, 6..9);
2854
2855 let response = project.update(cx, |project, cx| {
2856 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2857 });
2858 fake_server
2859 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2860 assert_eq!(
2861 params.text_document_position.text_document.uri.as_str(),
2862 "file:///dir/one.rs"
2863 );
2864 assert_eq!(
2865 params.text_document_position.position,
2866 lsp::Position::new(0, 7)
2867 );
2868 assert_eq!(params.new_name, "THREE");
2869 Ok(Some(lsp::WorkspaceEdit {
2870 changes: Some(
2871 [
2872 (
2873 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2874 vec![lsp::TextEdit::new(
2875 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2876 "THREE".to_string(),
2877 )],
2878 ),
2879 (
2880 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2881 vec![
2882 lsp::TextEdit::new(
2883 lsp::Range::new(
2884 lsp::Position::new(0, 24),
2885 lsp::Position::new(0, 27),
2886 ),
2887 "THREE".to_string(),
2888 ),
2889 lsp::TextEdit::new(
2890 lsp::Range::new(
2891 lsp::Position::new(0, 35),
2892 lsp::Position::new(0, 38),
2893 ),
2894 "THREE".to_string(),
2895 ),
2896 ],
2897 ),
2898 ]
2899 .into_iter()
2900 .collect(),
2901 ),
2902 ..Default::default()
2903 }))
2904 })
2905 .next()
2906 .await
2907 .unwrap();
2908 let mut transaction = response.await.unwrap().0;
2909 assert_eq!(transaction.len(), 2);
2910 assert_eq!(
2911 transaction
2912 .remove_entry(&buffer)
2913 .unwrap()
2914 .0
2915 .read_with(cx, |buffer, _| buffer.text()),
2916 "const THREE: usize = 1;"
2917 );
2918 assert_eq!(
2919 transaction
2920 .into_keys()
2921 .next()
2922 .unwrap()
2923 .read_with(cx, |buffer, _| buffer.text()),
2924 "const TWO: usize = one::THREE + one::THREE;"
2925 );
2926}
2927
2928#[gpui::test]
2929async fn test_search(cx: &mut gpui::TestAppContext) {
2930 let fs = FakeFs::new(cx.background());
2931 fs.insert_tree(
2932 "/dir",
2933 json!({
2934 "one.rs": "const ONE: usize = 1;",
2935 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
2936 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
2937 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
2938 }),
2939 )
2940 .await;
2941 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2942 assert_eq!(
2943 search(&project, SearchQuery::text("TWO", false, true), cx)
2944 .await
2945 .unwrap(),
2946 HashMap::from_iter([
2947 ("two.rs".to_string(), vec![6..9]),
2948 ("three.rs".to_string(), vec![37..40])
2949 ])
2950 );
2951
2952 let buffer_4 = project
2953 .update(cx, |project, cx| {
2954 project.open_local_buffer("/dir/four.rs", cx)
2955 })
2956 .await
2957 .unwrap();
2958 buffer_4.update(cx, |buffer, cx| {
2959 let text = "two::TWO";
2960 buffer.edit([(20..28, text), (31..43, text)], cx);
2961 });
2962
2963 assert_eq!(
2964 search(&project, SearchQuery::text("TWO", false, true), cx)
2965 .await
2966 .unwrap(),
2967 HashMap::from_iter([
2968 ("two.rs".to_string(), vec![6..9]),
2969 ("three.rs".to_string(), vec![37..40]),
2970 ("four.rs".to_string(), vec![25..28, 36..39])
2971 ])
2972 );
2973
2974 async fn search(
2975 project: &ModelHandle<Project>,
2976 query: SearchQuery,
2977 cx: &mut gpui::TestAppContext,
2978 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
2979 let results = project
2980 .update(cx, |project, cx| project.search(query, cx))
2981 .await?;
2982
2983 Ok(results
2984 .into_iter()
2985 .map(|(buffer, ranges)| {
2986 buffer.read_with(cx, |buffer, _| {
2987 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
2988 let ranges = ranges
2989 .into_iter()
2990 .map(|range| range.to_offset(buffer))
2991 .collect::<Vec<_>>();
2992 (path, ranges)
2993 })
2994 })
2995 .collect())
2996 }
2997}