use crate::{worktree::WorktreeHandle, Event, *};
use fs::RealFs;
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe};
use language::{
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use serde_json::json;
use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

#[gpui::test]
async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });

    let cancel_flag = Default::default();
    let results = project
        .read_with(cx, |project, cx| {
            project.match_paths("bna", false, false, 10, &cancel_flag, cx)
        })
        .await;
    assert_eq!(
        results
            .into_iter()
            .map(|result| result.path)
            .collect::<Vec<Arc<Path>>>(),
        vec![
            PathBuf::from("banana/carrot/date").into(),
            PathBuf::from("banana/carrot/endive").into(),
        ]
    );
}

#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
        name: "the-rust-language-server",
        capabilities: lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        },
        ..Default::default()
    });
    let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
        name: "the-json-language-server",
        capabilities: lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![":".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        },
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust_language));
        project.languages.add(Arc::new(json_language));
    });

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    toml_buffer
        .update(cx, |buffer, cx| buffer.save(cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_progress_token: Some(progress_token),
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        disk_based_diagnostics_progress_token: Some(progress_token),
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [0; 0]
        );
    });
}

#[gpui::test]
async fn test_toggling_enable_language_server(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    deterministic.forbid_parking();

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
        name: "rust-lsp",
        ..Default::default()
    });
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
        name: "js-lsp",
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(true),
                    ..Default::default()
                },
            );
            settings.language_overrides.insert(
                Arc::from("JavaScript"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

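/// Collects the chunks of `buffer` within `range`, coalescing consecutive chunks
/// that share the same diagnostic severity into `(text, severity)` pairs.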
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}

#[gpui::test]
async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
    let dir = temp_tree(json!({
        "root": {
            "dir1": {},
            "dir2": {
                "dir3": {}
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let cancel_flag = Default::default();
    let results = project
        .read_with(cx, |project, cx| {
            project.match_paths("dir", false, false, 10, &cancel_flag, cx)
        })
        .await;

    assert!(results.is_empty());
}

#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}

#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // The language server returns code actions that contain commands, not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}

#[gpui::test]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer
        .update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "the old contents");
            buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
            buffer.save(cx)
        })
        .await
        .unwrap();

    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}

#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer
        .update(cx, |buffer, cx| {
            buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
            buffer.save(cx)
        })
        .await
        .unwrap();

    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}

2026#[gpui::test]
2027async fn test_save_as(cx: &mut gpui::TestAppContext) {
2028 let fs = FakeFs::new(cx.background());
2029 fs.insert_tree("/dir", json!({})).await;
2030
2031 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2032 let buffer = project.update(cx, |project, cx| {
2033 project.create_buffer("", None, cx).unwrap()
2034 });
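    // Edit the untitled buffer so that it has unsaved changes.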
2035 buffer.update(cx, |buffer, cx| {
2036 buffer.edit([(0..0, "abc")], cx);
2037 assert!(buffer.is_dirty());
2038 assert!(!buffer.has_conflict());
2039 });
2040 project
2041 .update(cx, |project, cx| {
2042 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2043 })
2044 .await
2045 .unwrap();
2046 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2047 buffer.read_with(cx, |buffer, cx| {
2048 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2049 assert!(!buffer.is_dirty());
2050 assert!(!buffer.has_conflict());
2051 });
2052
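    // Re-opening the buffer's new path returns the same buffer that was just saved.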
2053 let opened_buffer = project
2054 .update(cx, |project, cx| {
2055 project.open_local_buffer("/dir/file1", cx)
2056 })
2057 .await
2058 .unwrap();
2059 assert_eq!(opened_buffer, buffer);
2060}
2061
2062#[gpui::test(retries = 5)]
2063async fn test_rescan_and_remote_updates(
2064 deterministic: Arc<Deterministic>,
2065 cx: &mut gpui::TestAppContext,
2066) {
2067 let dir = temp_tree(json!({
2068 "a": {
2069 "file1": "",
2070 "file2": "",
2071 "file3": "",
2072 },
2073 "b": {
2074 "c": {
2075 "file4": "",
2076 "file5": "",
2077 }
2078 }
2079 }));
2080
2081 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2082 let rpc = project.read_with(cx, |p, _| p.client.clone());
2083
2084 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2085 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2086 async move { buffer.await.unwrap() }
2087 };
2088 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2089 project.read_with(cx, |project, cx| {
2090 let tree = project.worktrees(cx).next().unwrap();
2091 tree.read(cx)
2092 .entry_for_path(path)
            .unwrap_or_else(|| panic!("no entry for path {}", path))
2094 .id
2095 })
2096 };
2097
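    // Open buffers for several files and record their worktree entry ids before
    // making any file-system changes.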
2098 let buffer2 = buffer_for_path("a/file2", cx).await;
2099 let buffer3 = buffer_for_path("a/file3", cx).await;
2100 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2101 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2102
2103 let file2_id = id_for_path("a/file2", &cx);
2104 let file3_id = id_for_path("a/file3", &cx);
2105 let file4_id = id_for_path("b/c/file4", &cx);
2106
2107 // Create a remote copy of this worktree.
2108 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2109 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2110 let remote = cx.update(|cx| {
2111 Worktree::remote(
2112 1,
2113 1,
2114 proto::WorktreeMetadata {
2115 id: initial_snapshot.id().to_proto(),
2116 root_name: initial_snapshot.root_name().into(),
2117 visible: true,
2118 },
2119 rpc.clone(),
2120 cx,
2121 )
2122 });
2123 remote.update(cx, |remote, _| {
2124 let update = initial_snapshot.build_initial_update(1);
2125 remote.as_remote_mut().unwrap().update_from_remote(update);
2126 });
2127 deterministic.run_until_parked();
2128
2129 cx.read(|cx| {
2130 assert!(!buffer2.read(cx).is_dirty());
2131 assert!(!buffer3.read(cx).is_dirty());
2132 assert!(!buffer4.read(cx).is_dirty());
2133 assert!(!buffer5.read(cx).is_dirty());
2134 });
2135
2136 // Rename and delete files and directories.
2137 tree.flush_fs_events(&cx).await;
2138 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2139 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2140 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2141 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2142 tree.flush_fs_events(&cx).await;
2143
2144 let expected_paths = vec![
2145 "a",
2146 "a/file1",
2147 "a/file2.new",
2148 "b",
2149 "d",
2150 "d/file3",
2151 "d/file4",
2152 ];
2153
2154 cx.read(|app| {
2155 assert_eq!(
2156 tree.read(app)
2157 .paths()
2158 .map(|p| p.to_str().unwrap())
2159 .collect::<Vec<_>>(),
2160 expected_paths
2161 );
2162
2163 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
2164 assert_eq!(id_for_path("d/file3", &cx), file3_id);
2165 assert_eq!(id_for_path("d/file4", &cx), file4_id);
2166
2167 assert_eq!(
2168 buffer2.read(app).file().unwrap().path().as_ref(),
2169 Path::new("a/file2.new")
2170 );
2171 assert_eq!(
2172 buffer3.read(app).file().unwrap().path().as_ref(),
2173 Path::new("d/file3")
2174 );
2175 assert_eq!(
2176 buffer4.read(app).file().unwrap().path().as_ref(),
2177 Path::new("d/file4")
2178 );
2179 assert_eq!(
2180 buffer5.read(app).file().unwrap().path().as_ref(),
2181 Path::new("b/c/file5")
2182 );
2183
2184 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2185 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2186 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2187 assert!(buffer5.read(app).file().unwrap().is_deleted());
2188 });
2189
2190 // Update the remote worktree. Check that it becomes consistent with the
2191 // local worktree.
2192 remote.update(cx, |remote, cx| {
2193 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2194 &initial_snapshot,
2195 1,
2196 1,
2197 true,
2198 );
2199 remote.as_remote_mut().unwrap().update_from_remote(update);
2200 });
2201 deterministic.run_until_parked();
2202 remote.read_with(cx, |remote, _| {
2203 assert_eq!(
2204 remote
2205 .paths()
2206 .map(|p| p.to_str().unwrap())
2207 .collect::<Vec<_>>(),
2208 expected_paths
2209 );
2210 });
2211}
2212
2213#[gpui::test]
2214async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2215 let fs = FakeFs::new(cx.background());
2216 fs.insert_tree(
2217 "/dir",
2218 json!({
2219 "a.txt": "a-contents",
2220 "b.txt": "b-contents",
2221 }),
2222 )
2223 .await;
2224
2225 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2226
2227 // Spawn multiple tasks to open paths, repeating some paths.
2228 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2229 (
2230 p.open_local_buffer("/dir/a.txt", cx),
2231 p.open_local_buffer("/dir/b.txt", cx),
2232 p.open_local_buffer("/dir/a.txt", cx),
2233 )
2234 });
2235
2236 let buffer_a_1 = buffer_a_1.await.unwrap();
2237 let buffer_a_2 = buffer_a_2.await.unwrap();
2238 let buffer_b = buffer_b.await.unwrap();
2239 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2240 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2241
2242 // There is only one buffer per path.
2243 let buffer_a_id = buffer_a_1.id();
2244 assert_eq!(buffer_a_2.id(), buffer_a_id);
2245
2246 // Open the same path again while it is still open.
2247 drop(buffer_a_1);
2248 let buffer_a_3 = project
2249 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2250 .await
2251 .unwrap();
2252
2253 // There's still only one buffer per path.
2254 assert_eq!(buffer_a_3.id(), buffer_a_id);
2255}
2256
2257#[gpui::test]
2258async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2259 let fs = FakeFs::new(cx.background());
2260 fs.insert_tree(
2261 "/dir",
2262 json!({
2263 "file1": "abc",
2264 "file2": "def",
2265 "file3": "ghi",
2266 }),
2267 )
2268 .await;
2269
2270 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2271
2272 let buffer1 = project
2273 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2274 .await
2275 .unwrap();
2276 let events = Rc::new(RefCell::new(Vec::new()));
2277
    // Initially, the buffer isn't dirty.
2279 buffer1.update(cx, |buffer, cx| {
2280 cx.subscribe(&buffer1, {
2281 let events = events.clone();
2282 move |_, _, event, _| match event {
2283 BufferEvent::Operation(_) => {}
2284 _ => events.borrow_mut().push(event.clone()),
2285 }
2286 })
2287 .detach();
2288
2289 assert!(!buffer.is_dirty());
2290 assert!(events.borrow().is_empty());
2291
2292 buffer.edit([(1..2, "")], cx);
2293 });
2294
    // After the first edit, the buffer is dirty and emits `Edited` and `DirtyChanged` events.
2296 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2298 assert!(buffer.is_dirty());
2299 assert_eq!(
2300 *events.borrow(),
2301 &[language::Event::Edited, language::Event::DirtyChanged]
2302 );
2303 events.borrow_mut().clear();
2304 buffer.did_save(
2305 buffer.version(),
2306 buffer.as_rope().fingerprint(),
2307 buffer.file().unwrap().mtime(),
2308 None,
2309 cx,
2310 );
2311 });
2312
    // After saving, the buffer is no longer dirty and emits a `Saved` event.
2314 buffer1.update(cx, |buffer, cx| {
2315 assert!(!buffer.is_dirty());
2316 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2317 events.borrow_mut().clear();
2318
2319 buffer.edit([(1..1, "B")], cx);
2320 buffer.edit([(2..2, "D")], cx);
2321 });
2322
    // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
2324 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2326 assert!(buffer.is_dirty());
2327 assert_eq!(
2328 *events.borrow(),
2329 &[
2330 language::Event::Edited,
2331 language::Event::DirtyChanged,
2332 language::Event::Edited,
2333 ],
2334 );
2335 events.borrow_mut().clear();
2336
2337 // After restoring the buffer to its previously-saved state,
2338 // the buffer is not considered dirty anymore.
2339 buffer.edit([(1..3, "")], cx);
        assert_eq!(buffer.text(), "ac");
2341 assert!(!buffer.is_dirty());
2342 });
2343
2344 assert_eq!(
2345 *events.borrow(),
2346 &[language::Event::Edited, language::Event::DirtyChanged]
2347 );
2348
2349 // When a file is deleted, the buffer is considered dirty.
2350 let events = Rc::new(RefCell::new(Vec::new()));
2351 let buffer2 = project
2352 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2353 .await
2354 .unwrap();
2355 buffer2.update(cx, |_, cx| {
2356 cx.subscribe(&buffer2, {
2357 let events = events.clone();
2358 move |_, _, event, _| events.borrow_mut().push(event.clone())
2359 })
2360 .detach();
2361 });
2362
2363 fs.remove_file("/dir/file2".as_ref(), Default::default())
2364 .await
2365 .unwrap();
2366 cx.foreground().run_until_parked();
2367 assert_eq!(
2368 *events.borrow(),
2369 &[
2370 language::Event::DirtyChanged,
2371 language::Event::FileHandleChanged
2372 ]
2373 );
2374
    // When a file that is already dirty is deleted, no `DirtyChanged` event is emitted.
2376 let events = Rc::new(RefCell::new(Vec::new()));
2377 let buffer3 = project
2378 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2379 .await
2380 .unwrap();
2381 buffer3.update(cx, |_, cx| {
2382 cx.subscribe(&buffer3, {
2383 let events = events.clone();
2384 move |_, _, event, _| events.borrow_mut().push(event.clone())
2385 })
2386 .detach();
2387 });
2388
2389 buffer3.update(cx, |buffer, cx| {
2390 buffer.edit([(0..0, "x")], cx);
2391 });
2392 events.borrow_mut().clear();
2393 fs.remove_file("/dir/file3".as_ref(), Default::default())
2394 .await
2395 .unwrap();
2396 cx.foreground().run_until_parked();
2397 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2398 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2399}
2400
2401#[gpui::test]
2402async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2403 let initial_contents = "aaa\nbbbbb\nc\n";
2404 let fs = FakeFs::new(cx.background());
2405 fs.insert_tree(
2406 "/dir",
2407 json!({
2408 "the-file": initial_contents,
2409 }),
2410 )
2411 .await;
2412 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2413 let buffer = project
2414 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2415 .await
2416 .unwrap();
2417
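    // Create anchors at column 1 of the first three rows, so their positions can be
    // checked after the file is reloaded from disk.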
2418 let anchors = (0..3)
2419 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2420 .collect::<Vec<_>>();
2421
2422 // Change the file on disk, adding two new lines of text, and removing
2423 // one line.
2424 buffer.read_with(cx, |buffer, _| {
2425 assert!(!buffer.is_dirty());
2426 assert!(!buffer.has_conflict());
2427 });
2428 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2429 fs.save(
2430 "/dir/the-file".as_ref(),
2431 &new_contents.into(),
2432 LineEnding::Unix,
2433 )
2434 .await
2435 .unwrap();
2436
2437 // Because the buffer was not modified, it is reloaded from disk. Its
2438 // contents are edited according to the diff between the old and new
2439 // file contents.
2440 cx.foreground().run_until_parked();
2441 buffer.update(cx, |buffer, _| {
2442 assert_eq!(buffer.text(), new_contents);
2443 assert!(!buffer.is_dirty());
2444 assert!(!buffer.has_conflict());
2445
2446 let anchor_positions = anchors
2447 .iter()
2448 .map(|anchor| anchor.to_point(&*buffer))
2449 .collect::<Vec<_>>();
2450 assert_eq!(
2451 anchor_positions,
2452 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
2453 );
2454 });
2455
    // Modify the buffer.
2457 buffer.update(cx, |buffer, cx| {
2458 buffer.edit([(0..0, " ")], cx);
2459 assert!(buffer.is_dirty());
2460 assert!(!buffer.has_conflict());
2461 });
2462
2463 // Change the file on disk again, adding blank lines to the beginning.
2464 fs.save(
2465 "/dir/the-file".as_ref(),
2466 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2467 LineEnding::Unix,
2468 )
2469 .await
2470 .unwrap();
2471
2472 // Because the buffer is modified, it doesn't reload from disk, but is
2473 // marked as having a conflict.
2474 cx.foreground().run_until_parked();
2475 buffer.read_with(cx, |buffer, _| {
2476 assert!(buffer.has_conflict());
2477 });
2478}
2479
2480#[gpui::test]
2481async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2482 let fs = FakeFs::new(cx.background());
2483 fs.insert_tree(
2484 "/dir",
2485 json!({
2486 "file1": "a\nb\nc\n",
2487 "file2": "one\r\ntwo\r\nthree\r\n",
2488 }),
2489 )
2490 .await;
2491
2492 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2493 let buffer1 = project
2494 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2495 .await
2496 .unwrap();
2497 let buffer2 = project
2498 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2499 .await
2500 .unwrap();
2501
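    // Buffers normalize their text to `\n` line endings in memory, while remembering
    // each file's original line ending style.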
2502 buffer1.read_with(cx, |buffer, _| {
2503 assert_eq!(buffer.text(), "a\nb\nc\n");
2504 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2505 });
2506 buffer2.read_with(cx, |buffer, _| {
2507 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2508 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2509 });
2510
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2513 fs.save(
2514 "/dir/file1".as_ref(),
2515 &"aaa\nb\nc\n".into(),
2516 LineEnding::Windows,
2517 )
2518 .await
2519 .unwrap();
2520 cx.foreground().run_until_parked();
2521 buffer1.read_with(cx, |buffer, _| {
2522 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2523 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2524 });
2525
    // Save a file with Windows line endings. The file is written correctly.
2527 buffer2
2528 .update(cx, |buffer, cx| {
2529 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2530 buffer.save(cx)
2531 })
2532 .await
2533 .unwrap();
2534 assert_eq!(
2535 fs.load("/dir/file2".as_ref()).await.unwrap(),
2536 "one\r\ntwo\r\nthree\r\nfour\r\n",
2537 );
2538}
2539
2540#[gpui::test]
2541async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2542 cx.foreground().forbid_parking();
2543
2544 let fs = FakeFs::new(cx.background());
2545 fs.insert_tree(
2546 "/the-dir",
2547 json!({
2548 "a.rs": "
2549 fn foo(mut v: Vec<usize>) {
2550 for x in &v {
2551 v.push(1);
2552 }
2553 }
2554 "
2555 .unindent(),
2556 }),
2557 )
2558 .await;
2559
2560 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2561 let buffer = project
2562 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2563 .await
2564 .unwrap();
2565
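    // Publish diagnostics in which hint-severity entries are linked to their primary
    // diagnostics via `relatedInformation`.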
2566 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2567 let message = lsp::PublishDiagnosticsParams {
2568 uri: buffer_uri.clone(),
2569 diagnostics: vec![
2570 lsp::Diagnostic {
2571 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2572 severity: Some(DiagnosticSeverity::WARNING),
2573 message: "error 1".to_string(),
2574 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2575 location: lsp::Location {
2576 uri: buffer_uri.clone(),
2577 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2578 },
2579 message: "error 1 hint 1".to_string(),
2580 }]),
2581 ..Default::default()
2582 },
2583 lsp::Diagnostic {
2584 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2585 severity: Some(DiagnosticSeverity::HINT),
2586 message: "error 1 hint 1".to_string(),
2587 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2588 location: lsp::Location {
2589 uri: buffer_uri.clone(),
2590 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2591 },
2592 message: "original diagnostic".to_string(),
2593 }]),
2594 ..Default::default()
2595 },
2596 lsp::Diagnostic {
2597 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2598 severity: Some(DiagnosticSeverity::ERROR),
2599 message: "error 2".to_string(),
2600 related_information: Some(vec![
2601 lsp::DiagnosticRelatedInformation {
2602 location: lsp::Location {
2603 uri: buffer_uri.clone(),
2604 range: lsp::Range::new(
2605 lsp::Position::new(1, 13),
2606 lsp::Position::new(1, 15),
2607 ),
2608 },
2609 message: "error 2 hint 1".to_string(),
2610 },
2611 lsp::DiagnosticRelatedInformation {
2612 location: lsp::Location {
2613 uri: buffer_uri.clone(),
2614 range: lsp::Range::new(
2615 lsp::Position::new(1, 13),
2616 lsp::Position::new(1, 15),
2617 ),
2618 },
2619 message: "error 2 hint 2".to_string(),
2620 },
2621 ]),
2622 ..Default::default()
2623 },
2624 lsp::Diagnostic {
2625 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2626 severity: Some(DiagnosticSeverity::HINT),
2627 message: "error 2 hint 1".to_string(),
2628 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2629 location: lsp::Location {
2630 uri: buffer_uri.clone(),
2631 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2632 },
2633 message: "original diagnostic".to_string(),
2634 }]),
2635 ..Default::default()
2636 },
2637 lsp::Diagnostic {
2638 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2639 severity: Some(DiagnosticSeverity::HINT),
2640 message: "error 2 hint 2".to_string(),
2641 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2642 location: lsp::Location {
2643 uri: buffer_uri.clone(),
2644 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2645 },
2646 message: "original diagnostic".to_string(),
2647 }]),
2648 ..Default::default()
2649 },
2650 ],
2651 version: None,
2652 };
2653
2654 project
2655 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2656 .unwrap();
2657 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2658
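    // The diagnostics are grouped: each primary diagnostic shares a group id with its
    // supporting hints.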
2659 assert_eq!(
2660 buffer
2661 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2662 .collect::<Vec<_>>(),
2663 &[
2664 DiagnosticEntry {
2665 range: Point::new(1, 8)..Point::new(1, 9),
2666 diagnostic: Diagnostic {
2667 severity: DiagnosticSeverity::WARNING,
2668 message: "error 1".to_string(),
2669 group_id: 0,
2670 is_primary: true,
2671 ..Default::default()
2672 }
2673 },
2674 DiagnosticEntry {
2675 range: Point::new(1, 8)..Point::new(1, 9),
2676 diagnostic: Diagnostic {
2677 severity: DiagnosticSeverity::HINT,
2678 message: "error 1 hint 1".to_string(),
2679 group_id: 0,
2680 is_primary: false,
2681 ..Default::default()
2682 }
2683 },
2684 DiagnosticEntry {
2685 range: Point::new(1, 13)..Point::new(1, 15),
2686 diagnostic: Diagnostic {
2687 severity: DiagnosticSeverity::HINT,
2688 message: "error 2 hint 1".to_string(),
2689 group_id: 1,
2690 is_primary: false,
2691 ..Default::default()
2692 }
2693 },
2694 DiagnosticEntry {
2695 range: Point::new(1, 13)..Point::new(1, 15),
2696 diagnostic: Diagnostic {
2697 severity: DiagnosticSeverity::HINT,
2698 message: "error 2 hint 2".to_string(),
2699 group_id: 1,
2700 is_primary: false,
2701 ..Default::default()
2702 }
2703 },
2704 DiagnosticEntry {
2705 range: Point::new(2, 8)..Point::new(2, 17),
2706 diagnostic: Diagnostic {
2707 severity: DiagnosticSeverity::ERROR,
2708 message: "error 2".to_string(),
2709 group_id: 1,
2710 is_primary: true,
2711 ..Default::default()
2712 }
2713 }
2714 ]
2715 );
2716
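    // Each diagnostic group can be retrieved by its id, yielding the primary diagnostic
    // together with its hints.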
2717 assert_eq!(
2718 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2719 &[
2720 DiagnosticEntry {
2721 range: Point::new(1, 8)..Point::new(1, 9),
2722 diagnostic: Diagnostic {
2723 severity: DiagnosticSeverity::WARNING,
2724 message: "error 1".to_string(),
2725 group_id: 0,
2726 is_primary: true,
2727 ..Default::default()
2728 }
2729 },
2730 DiagnosticEntry {
2731 range: Point::new(1, 8)..Point::new(1, 9),
2732 diagnostic: Diagnostic {
2733 severity: DiagnosticSeverity::HINT,
2734 message: "error 1 hint 1".to_string(),
2735 group_id: 0,
2736 is_primary: false,
2737 ..Default::default()
2738 }
2739 },
2740 ]
2741 );
2742 assert_eq!(
2743 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2744 &[
2745 DiagnosticEntry {
2746 range: Point::new(1, 13)..Point::new(1, 15),
2747 diagnostic: Diagnostic {
2748 severity: DiagnosticSeverity::HINT,
2749 message: "error 2 hint 1".to_string(),
2750 group_id: 1,
2751 is_primary: false,
2752 ..Default::default()
2753 }
2754 },
2755 DiagnosticEntry {
2756 range: Point::new(1, 13)..Point::new(1, 15),
2757 diagnostic: Diagnostic {
2758 severity: DiagnosticSeverity::HINT,
2759 message: "error 2 hint 2".to_string(),
2760 group_id: 1,
2761 is_primary: false,
2762 ..Default::default()
2763 }
2764 },
2765 DiagnosticEntry {
2766 range: Point::new(2, 8)..Point::new(2, 17),
2767 diagnostic: Diagnostic {
2768 severity: DiagnosticSeverity::ERROR,
2769 message: "error 2".to_string(),
2770 group_id: 1,
2771 is_primary: true,
2772 ..Default::default()
2773 }
2774 }
2775 ]
2776 );
2777}
2778
2779#[gpui::test]
2780async fn test_rename(cx: &mut gpui::TestAppContext) {
2781 cx.foreground().forbid_parking();
2782
2783 let mut language = Language::new(
2784 LanguageConfig {
2785 name: "Rust".into(),
2786 path_suffixes: vec!["rs".to_string()],
2787 ..Default::default()
2788 },
2789 Some(tree_sitter_rust::language()),
2790 );
2791 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
2792 capabilities: lsp::ServerCapabilities {
2793 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2794 prepare_provider: Some(true),
2795 work_done_progress_options: Default::default(),
2796 })),
2797 ..Default::default()
2798 },
2799 ..Default::default()
2800 });
2801
2802 let fs = FakeFs::new(cx.background());
2803 fs.insert_tree(
2804 "/dir",
2805 json!({
2806 "one.rs": "const ONE: usize = 1;",
2807 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2808 }),
2809 )
2810 .await;
2811
2812 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2813 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2814 let buffer = project
2815 .update(cx, |project, cx| {
2816 project.open_local_buffer("/dir/one.rs", cx)
2817 })
2818 .await
2819 .unwrap();
2820
2821 let fake_server = fake_servers.next().await.unwrap();
2822
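    // Prepare the rename. The fake server responds with the range of the symbol to be renamed.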
2823 let response = project.update(cx, |project, cx| {
2824 project.prepare_rename(buffer.clone(), 7, cx)
2825 });
2826 fake_server
2827 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2828 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2829 assert_eq!(params.position, lsp::Position::new(0, 7));
2830 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2831 lsp::Position::new(0, 6),
2832 lsp::Position::new(0, 9),
2833 ))))
2834 })
2835 .next()
2836 .await
2837 .unwrap();
2838 let range = response.await.unwrap().unwrap();
2839 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2840 assert_eq!(range, 6..9);
2841
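    // Perform the rename. The server's workspace edit touches both files, producing a
    // transaction with an entry for each affected buffer.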
2842 let response = project.update(cx, |project, cx| {
2843 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2844 });
2845 fake_server
2846 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2847 assert_eq!(
2848 params.text_document_position.text_document.uri.as_str(),
2849 "file:///dir/one.rs"
2850 );
2851 assert_eq!(
2852 params.text_document_position.position,
2853 lsp::Position::new(0, 7)
2854 );
2855 assert_eq!(params.new_name, "THREE");
2856 Ok(Some(lsp::WorkspaceEdit {
2857 changes: Some(
2858 [
2859 (
2860 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2861 vec![lsp::TextEdit::new(
2862 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2863 "THREE".to_string(),
2864 )],
2865 ),
2866 (
2867 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2868 vec![
2869 lsp::TextEdit::new(
2870 lsp::Range::new(
2871 lsp::Position::new(0, 24),
2872 lsp::Position::new(0, 27),
2873 ),
2874 "THREE".to_string(),
2875 ),
2876 lsp::TextEdit::new(
2877 lsp::Range::new(
2878 lsp::Position::new(0, 35),
2879 lsp::Position::new(0, 38),
2880 ),
2881 "THREE".to_string(),
2882 ),
2883 ],
2884 ),
2885 ]
2886 .into_iter()
2887 .collect(),
2888 ),
2889 ..Default::default()
2890 }))
2891 })
2892 .next()
2893 .await
2894 .unwrap();
2895 let mut transaction = response.await.unwrap().0;
2896 assert_eq!(transaction.len(), 2);
2897 assert_eq!(
2898 transaction
2899 .remove_entry(&buffer)
2900 .unwrap()
2901 .0
2902 .read_with(cx, |buffer, _| buffer.text()),
2903 "const THREE: usize = 1;"
2904 );
2905 assert_eq!(
2906 transaction
2907 .into_keys()
2908 .next()
2909 .unwrap()
2910 .read_with(cx, |buffer, _| buffer.text()),
2911 "const TWO: usize = one::THREE + one::THREE;"
2912 );
2913}
2914
2915#[gpui::test]
2916async fn test_search(cx: &mut gpui::TestAppContext) {
2917 let fs = FakeFs::new(cx.background());
2918 fs.insert_tree(
2919 "/dir",
2920 json!({
2921 "one.rs": "const ONE: usize = 1;",
2922 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
2923 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
2924 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
2925 }),
2926 )
2927 .await;
2928 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2929 assert_eq!(
2930 search(&project, SearchQuery::text("TWO", false, true), cx)
2931 .await
2932 .unwrap(),
2933 HashMap::from_iter([
2934 ("two.rs".to_string(), vec![6..9]),
2935 ("three.rs".to_string(), vec![37..40])
2936 ])
2937 );
2938
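    // Edit an open buffer so that it also matches the query. Search results reflect the
    // buffer's unsaved contents.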
2939 let buffer_4 = project
2940 .update(cx, |project, cx| {
2941 project.open_local_buffer("/dir/four.rs", cx)
2942 })
2943 .await
2944 .unwrap();
2945 buffer_4.update(cx, |buffer, cx| {
2946 let text = "two::TWO";
2947 buffer.edit([(20..28, text), (31..43, text)], cx);
2948 });
2949
2950 assert_eq!(
2951 search(&project, SearchQuery::text("TWO", false, true), cx)
2952 .await
2953 .unwrap(),
2954 HashMap::from_iter([
2955 ("two.rs".to_string(), vec![6..9]),
2956 ("three.rs".to_string(), vec![37..40]),
2957 ("four.rs".to_string(), vec![25..28, 36..39])
2958 ])
2959 );
2960
2961 async fn search(
2962 project: &ModelHandle<Project>,
2963 query: SearchQuery,
2964 cx: &mut gpui::TestAppContext,
2965 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
2966 let results = project
2967 .update(cx, |project, cx| project.search(query, cx))
2968 .await?;
2969
2970 Ok(results
2971 .into_iter()
2972 .map(|(buffer, ranges)| {
2973 buffer.read_with(cx, |buffer, _| {
2974 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
2975 let ranges = ranges
2976 .into_iter()
2977 .map(|range| range.to_offset(buffer))
2978 .collect::<Vec<_>>();
2979 (path, ranges)
2980 })
2981 })
2982 .collect())
2983 }
2984}