1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::{executor::Deterministic, test::subscribe};
6use language::{
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use pretty_assertions::assert_eq;
12use serde_json::json;
13use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
14use unindent::Unindent as _;
15use util::{assert_set_eq, test::temp_tree};
16
17#[gpui::test]
18async fn test_symlinks(cx: &mut gpui::TestAppContext) {
19 let dir = temp_tree(json!({
20 "root": {
21 "apple": "",
22 "banana": {
23 "carrot": {
24 "date": "",
25 "endive": "",
26 }
27 },
28 "fennel": {
29 "grape": "",
30 }
31 }
32 }));
33
34 let root_link_path = dir.path().join("root_link");
35 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
36 unix::fs::symlink(
37 &dir.path().join("root/fennel"),
38 &dir.path().join("root/finnochio"),
39 )
40 .unwrap();
41
42 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
43 project.read_with(cx, |project, cx| {
44 let tree = project.worktrees(cx).next().unwrap().read(cx);
45 assert_eq!(tree.file_count(), 5);
46 assert_eq!(
47 tree.inode_for_path("fennel/grape"),
48 tree.inode_for_path("finnochio/grape")
49 );
50 });
51}
52
// End-to-end coverage of language-server lifecycle management: servers start
// lazily when a buffer of a matching language is opened, notifications are
// routed only to the server for the buffer's language, and renames/restarts
// close and reopen documents on the appropriate servers.
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    cx.foreground().forbid_parking();

    // Two languages, each with its own fake LSP adapter. The Rust server
    // advertises "." and "::" as completion trigger characters; the JSON
    // server advertises ":". The buffers' configuration should reflect this.
    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A rename within the same language is expected to surface as a
    // close-for-the-old-path followed by an open-for-the-new-path.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed a diagnostic on the buffer so we can later verify it is cleared
    // when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server.
    // The two open notifications may arrive in either order, hence the set
    // comparison rather than an ordered one.
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
440
441#[gpui::test]
442async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
443 cx.foreground().forbid_parking();
444
445 let mut language = Language::new(
446 LanguageConfig {
447 name: "Rust".into(),
448 path_suffixes: vec!["rs".to_string()],
449 ..Default::default()
450 },
451 Some(tree_sitter_rust::language()),
452 );
453 let mut fake_servers = language
454 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
455 name: "the-language-server",
456 ..Default::default()
457 }))
458 .await;
459
460 let fs = FakeFs::new(cx.background());
461 fs.insert_tree(
462 "/the-root",
463 json!({
464 "a.rs": "",
465 "b.rs": "",
466 }),
467 )
468 .await;
469
470 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
471 project.update(cx, |project, _| {
472 project.languages.add(Arc::new(language));
473 });
474 cx.foreground().run_until_parked();
475
476 // Start the language server by opening a buffer with a compatible file extension.
477 let _buffer = project
478 .update(cx, |project, cx| {
479 project.open_local_buffer("/the-root/a.rs", cx)
480 })
481 .await
482 .unwrap();
483
484 // Keep track of the FS events reported to the language server.
485 let fake_server = fake_servers.next().await.unwrap();
486 let file_changes = Arc::new(Mutex::new(Vec::new()));
487 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
488 let file_changes = file_changes.clone();
489 move |params, _| {
490 let mut file_changes = file_changes.lock();
491 file_changes.extend(params.changes);
492 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
493 }
494 });
495
496 cx.foreground().run_until_parked();
497 assert_eq!(file_changes.lock().len(), 0);
498
499 // Perform some file system mutations.
500 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
501 .await
502 .unwrap();
503 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
504 .await
505 .unwrap();
506
507 // The language server receives events for both FS mutations.
508 cx.foreground().run_until_parked();
509 assert_eq!(
510 &*file_changes.lock(),
511 &[
512 lsp::FileEvent {
513 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
514 typ: lsp::FileChangeType::DELETED,
515 },
516 lsp::FileEvent {
517 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
518 typ: lsp::FileChangeType::CREATED,
519 },
520 ]
521 );
522}
523
524#[gpui::test]
525async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
526 cx.foreground().forbid_parking();
527
528 let fs = FakeFs::new(cx.background());
529 fs.insert_tree(
530 "/dir",
531 json!({
532 "a.rs": "let a = 1;",
533 "b.rs": "let b = 2;"
534 }),
535 )
536 .await;
537
538 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
539
540 let buffer_a = project
541 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
542 .await
543 .unwrap();
544 let buffer_b = project
545 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
546 .await
547 .unwrap();
548
549 project.update(cx, |project, cx| {
550 project
551 .update_diagnostics(
552 0,
553 lsp::PublishDiagnosticsParams {
554 uri: Url::from_file_path("/dir/a.rs").unwrap(),
555 version: None,
556 diagnostics: vec![lsp::Diagnostic {
557 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
558 severity: Some(lsp::DiagnosticSeverity::ERROR),
559 message: "error 1".to_string(),
560 ..Default::default()
561 }],
562 },
563 &[],
564 cx,
565 )
566 .unwrap();
567 project
568 .update_diagnostics(
569 0,
570 lsp::PublishDiagnosticsParams {
571 uri: Url::from_file_path("/dir/b.rs").unwrap(),
572 version: None,
573 diagnostics: vec![lsp::Diagnostic {
574 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
575 severity: Some(lsp::DiagnosticSeverity::WARNING),
576 message: "error 2".to_string(),
577 ..Default::default()
578 }],
579 },
580 &[],
581 cx,
582 )
583 .unwrap();
584 });
585
586 buffer_a.read_with(cx, |buffer, _| {
587 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
588 assert_eq!(
589 chunks
590 .iter()
591 .map(|(s, d)| (s.as_str(), *d))
592 .collect::<Vec<_>>(),
593 &[
594 ("let ", None),
595 ("a", Some(DiagnosticSeverity::ERROR)),
596 (" = 1;", None),
597 ]
598 );
599 });
600 buffer_b.read_with(cx, |buffer, _| {
601 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
602 assert_eq!(
603 chunks
604 .iter()
605 .map(|(s, d)| (s.as_str(), *d))
606 .collect::<Vec<_>>(),
607 &[
608 ("let ", None),
609 ("b", Some(DiagnosticSeverity::WARNING)),
610 (" = 2;", None),
611 ]
612 );
613 });
614}
615
616#[gpui::test]
617async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
618 cx.foreground().forbid_parking();
619
620 let fs = FakeFs::new(cx.background());
621 fs.insert_tree(
622 "/root",
623 json!({
624 "dir": {
625 "a.rs": "let a = 1;",
626 },
627 "other.rs": "let b = c;"
628 }),
629 )
630 .await;
631
632 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
633
634 let (worktree, _) = project
635 .update(cx, |project, cx| {
636 project.find_or_create_local_worktree("/root/other.rs", false, cx)
637 })
638 .await
639 .unwrap();
640 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
641
642 project.update(cx, |project, cx| {
643 project
644 .update_diagnostics(
645 0,
646 lsp::PublishDiagnosticsParams {
647 uri: Url::from_file_path("/root/other.rs").unwrap(),
648 version: None,
649 diagnostics: vec![lsp::Diagnostic {
650 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
651 severity: Some(lsp::DiagnosticSeverity::ERROR),
652 message: "unknown variable 'c'".to_string(),
653 ..Default::default()
654 }],
655 },
656 &[],
657 cx,
658 )
659 .unwrap();
660 });
661
662 let buffer = project
663 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
664 .await
665 .unwrap();
666 buffer.read_with(cx, |buffer, _| {
667 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
668 assert_eq!(
669 chunks
670 .iter()
671 .map(|(s, d)| (s.as_str(), *d))
672 .collect::<Vec<_>>(),
673 &[
674 ("let b = ", None),
675 ("c", Some(DiagnosticSeverity::ERROR)),
676 (";", None),
677 ]
678 );
679 });
680
681 project.read_with(cx, |project, cx| {
682 assert_eq!(project.diagnostic_summaries(cx).next(), None);
683 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
684 });
685}
686
// Verifies that LSP progress notifications carrying the adapter's
// disk-based-diagnostics token are translated into
// DiskBasedDiagnosticsStarted/Finished project events, that publishing
// diagnostics emits DiagnosticsUpdated, and that re-publishing identical
// empty diagnostics does not emit a redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe before driving the server so no events are missed.
    let mut events = subscribe(&project, cx);

    // Beginning progress under the configured token maps to the "started" event.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    // Publishing diagnostics for a file emits a DiagnosticsUpdated event for
    // that worktree-relative path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress maps to the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    // A buffer opened after the publish sees the diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical empty publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
814
// Restarting a language server while its disk-based diagnostics progress is
// still open must not leave the project stuck in the "diagnostics running"
// state: the replacement server (id 1) drives the started/finished events,
// and the abandoned progress of the old server (id 0) is discarded.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets a new id (1).
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // `[0; 0]` is an empty array: no server is running diagnostics.
            [0; 0]
        );
    });
}
892
893#[gpui::test]
894async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
895 cx.foreground().forbid_parking();
896
897 let mut language = Language::new(
898 LanguageConfig {
899 path_suffixes: vec!["rs".to_string()],
900 ..Default::default()
901 },
902 None,
903 );
904 let mut fake_servers = language
905 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
906 name: "the-lsp",
907 ..Default::default()
908 }))
909 .await;
910
911 let fs = FakeFs::new(cx.background());
912 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
913
914 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
915 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
916
917 let buffer = project
918 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
919 .await
920 .unwrap();
921
922 // Before restarting the server, report diagnostics with an unknown buffer version.
923 let fake_server = fake_servers.next().await.unwrap();
924 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
925 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
926 version: Some(10000),
927 diagnostics: Vec::new(),
928 });
929 cx.foreground().run_until_parked();
930
931 project.update(cx, |project, cx| {
932 project.restart_language_servers_for_buffers([buffer.clone()], cx);
933 });
934 let mut fake_server = fake_servers.next().await.unwrap();
935 let notification = fake_server
936 .receive_notification::<lsp::notification::DidOpenTextDocument>()
937 .await
938 .text_document;
939 assert_eq!(notification.version, 0);
940}
941
942#[gpui::test]
943async fn test_toggling_enable_language_server(
944 deterministic: Arc<Deterministic>,
945 cx: &mut gpui::TestAppContext,
946) {
947 deterministic.forbid_parking();
948
949 let mut rust = Language::new(
950 LanguageConfig {
951 name: Arc::from("Rust"),
952 path_suffixes: vec!["rs".to_string()],
953 ..Default::default()
954 },
955 None,
956 );
957 let mut fake_rust_servers = rust
958 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
959 name: "rust-lsp",
960 ..Default::default()
961 }))
962 .await;
963 let mut js = Language::new(
964 LanguageConfig {
965 name: Arc::from("JavaScript"),
966 path_suffixes: vec!["js".to_string()],
967 ..Default::default()
968 },
969 None,
970 );
971 let mut fake_js_servers = js
972 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
973 name: "js-lsp",
974 ..Default::default()
975 }))
976 .await;
977
978 let fs = FakeFs::new(cx.background());
979 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
980 .await;
981
982 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
983 project.update(cx, |project, _| {
984 project.languages.add(Arc::new(rust));
985 project.languages.add(Arc::new(js));
986 });
987
988 let _rs_buffer = project
989 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
990 .await
991 .unwrap();
992 let _js_buffer = project
993 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
994 .await
995 .unwrap();
996
997 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
998 assert_eq!(
999 fake_rust_server_1
1000 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1001 .await
1002 .text_document
1003 .uri
1004 .as_str(),
1005 "file:///dir/a.rs"
1006 );
1007
1008 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1009 assert_eq!(
1010 fake_js_server
1011 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1012 .await
1013 .text_document
1014 .uri
1015 .as_str(),
1016 "file:///dir/b.js"
1017 );
1018
1019 // Disable Rust language server, ensuring only that server gets stopped.
1020 cx.update(|cx| {
1021 cx.update_global(|settings: &mut Settings, _| {
1022 settings.language_overrides.insert(
1023 Arc::from("Rust"),
1024 settings::EditorSettings {
1025 enable_language_server: Some(false),
1026 ..Default::default()
1027 },
1028 );
1029 })
1030 });
1031 fake_rust_server_1
1032 .receive_notification::<lsp::notification::Exit>()
1033 .await;
1034
1035 // Enable Rust and disable JavaScript language servers, ensuring that the
1036 // former gets started again and that the latter stops.
1037 cx.update(|cx| {
1038 cx.update_global(|settings: &mut Settings, _| {
1039 settings.language_overrides.insert(
1040 Arc::from("Rust"),
1041 settings::EditorSettings {
1042 enable_language_server: Some(true),
1043 ..Default::default()
1044 },
1045 );
1046 settings.language_overrides.insert(
1047 Arc::from("JavaScript"),
1048 settings::EditorSettings {
1049 enable_language_server: Some(false),
1050 ..Default::default()
1051 },
1052 );
1053 })
1054 });
1055 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1056 assert_eq!(
1057 fake_rust_server_2
1058 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1059 .await
1060 .text_document
1061 .uri
1062 .as_str(),
1063 "file:///dir/a.rs"
1064 );
1065 fake_js_server
1066 .receive_notification::<lsp::notification::Exit>()
1067 .await;
1068}
1069
#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    // A Rust language whose fake LSP adapter declares "disk" as a disk-based
    // diagnostics source, so diagnostics published with `source: "disk"` are
    // ingested with `is_disk_based: true`.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down two rows, so the LSP positions
    // reported against the original document version are now stale.
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer. Because
    // the version matches the pre-edit snapshot, the positions below are
    // interpreted against that snapshot and must be mapped forward through
    // the "\n\n" insertion.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created: rows 1 and 2
    // in the published payload now show up at rows 3 and 4.
    // NOTE(review): group ids appear to be assigned in the order diagnostics
    // were received; the 'A' error (outside this query range) presumably took
    // group id 0.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Chunks over the whole buffer carry the diagnostic severity of the
        // text they cover (None for undiagnosed text).
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range that starts/ends inside diagnosed text yields clipped chunks.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        // The wider WARNING is returned before the narrower ERROR that starts
        // at the same position.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the diagnostics overlap ("A"), the higher-severity ERROR wins;
        // the remainder of the warning range keeps WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics: 'BB' is published before 'A', but the
    // snapshot query below returns them sorted by position.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1349
1350#[gpui::test]
1351async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1352 cx.foreground().forbid_parking();
1353
1354 let text = concat!(
1355 "let one = ;\n", //
1356 "let two = \n",
1357 "let three = 3;\n",
1358 );
1359
1360 let fs = FakeFs::new(cx.background());
1361 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1362
1363 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1364 let buffer = project
1365 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1366 .await
1367 .unwrap();
1368
1369 project.update(cx, |project, cx| {
1370 project
1371 .update_buffer_diagnostics(
1372 &buffer,
1373 vec![
1374 DiagnosticEntry {
1375 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1376 diagnostic: Diagnostic {
1377 severity: DiagnosticSeverity::ERROR,
1378 message: "syntax error 1".to_string(),
1379 ..Default::default()
1380 },
1381 },
1382 DiagnosticEntry {
1383 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1384 diagnostic: Diagnostic {
1385 severity: DiagnosticSeverity::ERROR,
1386 message: "syntax error 2".to_string(),
1387 ..Default::default()
1388 },
1389 },
1390 ],
1391 None,
1392 cx,
1393 )
1394 .unwrap();
1395 });
1396
1397 // An empty range is extended forward to include the following character.
1398 // At the end of a line, an empty range is extended backward to include
1399 // the preceding character.
1400 buffer.read_with(cx, |buffer, _| {
1401 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1402 assert_eq!(
1403 chunks
1404 .iter()
1405 .map(|(s, d)| (s.as_str(), *d))
1406 .collect::<Vec<_>>(),
1407 &[
1408 ("let one = ", None),
1409 (";", Some(DiagnosticSeverity::ERROR)),
1410 ("\nlet two =", None),
1411 (" ", Some(DiagnosticSeverity::ERROR)),
1412 ("\nlet three = 3;\n", None)
1413 ]
1414 );
1415 });
1416}
1417
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP edits computed against an older document version are
    // transformed through the buffer edits made since that version before
    // being applied.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the edits below will reference this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are expressed in the coordinates of the original
    // (versioned) text, not the edited buffer.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits:
                    // insert "4000" at column 5, then delete the old "3".
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the comments inserted after
    // the server computed its edits, while still landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
1574
1575#[gpui::test]
1576async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1577 cx.foreground().forbid_parking();
1578
1579 let text = "
1580 use a::b;
1581 use a::c;
1582
1583 fn f() {
1584 b();
1585 c();
1586 }
1587 "
1588 .unindent();
1589
1590 let fs = FakeFs::new(cx.background());
1591 fs.insert_tree(
1592 "/dir",
1593 json!({
1594 "a.rs": text.clone(),
1595 }),
1596 )
1597 .await;
1598
1599 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1600 let buffer = project
1601 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1602 .await
1603 .unwrap();
1604
1605 // Simulate the language server sending us a small edit in the form of a very large diff.
1606 // Rust-analyzer does this when performing a merge-imports code action.
1607 let edits = project
1608 .update(cx, |project, cx| {
1609 project.edits_from_lsp(
1610 &buffer,
1611 [
1612 // Replace the first use statement without editing the semicolon.
1613 lsp::TextEdit {
1614 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1615 new_text: "a::{b, c}".into(),
1616 },
1617 // Reinsert the remainder of the file between the semicolon and the final
1618 // newline of the file.
1619 lsp::TextEdit {
1620 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1621 new_text: "\n\n".into(),
1622 },
1623 lsp::TextEdit {
1624 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1625 new_text: "
1626 fn f() {
1627 b();
1628 c();
1629 }"
1630 .unindent(),
1631 },
1632 // Delete everything after the first newline of the file.
1633 lsp::TextEdit {
1634 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1635 new_text: "".into(),
1636 },
1637 ],
1638 None,
1639 cx,
1640 )
1641 })
1642 .await
1643 .unwrap();
1644
1645 buffer.update(cx, |buffer, cx| {
1646 let edits = edits
1647 .into_iter()
1648 .map(|(range, text)| {
1649 (
1650 range.start.to_point(buffer)..range.end.to_point(buffer),
1651 text,
1652 )
1653 })
1654 .collect::<Vec<_>>();
1655
1656 assert_eq!(
1657 edits,
1658 [
1659 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1660 (Point::new(1, 0)..Point::new(2, 0), "".into())
1661 ]
1662 );
1663
1664 for (range, new_text) in edits {
1665 buffer.edit([(range, new_text)], None, cx);
1666 }
1667 assert_eq!(
1668 buffer.text(),
1669 "
1670 use a::{b, c};
1671
1672 fn f() {
1673 b();
1674 c();
1675 }
1676 "
1677 .unindent()
1678 );
1679 });
1680}
1681
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    // Verifies that malformed LSP edits — out of order, with inverted ranges,
    // or pointing past the end of the document — are normalized into a valid,
    // minimal set of buffer edits.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four messy edits were normalized into two well-formed,
        // position-sorted, non-overlapping edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
1784
1785fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1786 buffer: &Buffer,
1787 range: Range<T>,
1788) -> Vec<(String, Option<DiagnosticSeverity>)> {
1789 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1790 for chunk in buffer.snapshot().chunks(range, true) {
1791 if chunks.last().map_or(false, |prev_chunk| {
1792 prev_chunk.1 == chunk.diagnostic_severity
1793 }) {
1794 chunks.last_mut().unwrap().0.push_str(chunk.text);
1795 } else {
1796 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1797 }
1798 }
1799 chunks
1800}
1801
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: resolving a definition that
    // lives outside the project's visible worktree opens an invisible
    // worktree for the target file, which is released once the definition
    // is dropped.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is outside
    // the visible worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports the definition of `a` (offset 22 in b.rs) as
    // the range 9..10 in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // Resolving the definition added an *invisible* worktree for a.rs
        // alongside the original visible one for b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
1900
1901#[gpui::test]
1902async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1903 let mut language = Language::new(
1904 LanguageConfig {
1905 name: "TypeScript".into(),
1906 path_suffixes: vec!["ts".to_string()],
1907 ..Default::default()
1908 },
1909 Some(tree_sitter_typescript::language_typescript()),
1910 );
1911 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1912
1913 let fs = FakeFs::new(cx.background());
1914 fs.insert_tree(
1915 "/dir",
1916 json!({
1917 "a.ts": "",
1918 }),
1919 )
1920 .await;
1921
1922 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1923 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1924 let buffer = project
1925 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1926 .await
1927 .unwrap();
1928
1929 let fake_server = fake_language_servers.next().await.unwrap();
1930
1931 let text = "let a = b.fqn";
1932 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1933 let completions = project.update(cx, |project, cx| {
1934 project.completions(&buffer, text.len(), cx)
1935 });
1936
1937 fake_server
1938 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1939 Ok(Some(lsp::CompletionResponse::Array(vec![
1940 lsp::CompletionItem {
1941 label: "fullyQualifiedName?".into(),
1942 insert_text: Some("fullyQualifiedName".into()),
1943 ..Default::default()
1944 },
1945 ])))
1946 })
1947 .next()
1948 .await;
1949 let completions = completions.await.unwrap();
1950 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1951 assert_eq!(completions.len(), 1);
1952 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1953 assert_eq!(
1954 completions[0].old_range.to_offset(&snapshot),
1955 text.len() - 3..text.len()
1956 );
1957
1958 let text = "let a = \"atoms/cmp\"";
1959 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1960 let completions = project.update(cx, |project, cx| {
1961 project.completions(&buffer, text.len() - 1, cx)
1962 });
1963
1964 fake_server
1965 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1966 Ok(Some(lsp::CompletionResponse::Array(vec![
1967 lsp::CompletionItem {
1968 label: "component".into(),
1969 ..Default::default()
1970 },
1971 ])))
1972 })
1973 .next()
1974 .await;
1975 let completions = completions.await.unwrap();
1976 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1977 assert_eq!(completions.len(), 1);
1978 assert_eq!(completions[0].new_text, "component");
1979 assert_eq!(
1980 completions[0].old_range.to_offset(&snapshot),
1981 text.len() - 4..text.len() - 1
1982 );
1983}
1984
1985#[gpui::test]
1986async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1987 let mut language = Language::new(
1988 LanguageConfig {
1989 name: "TypeScript".into(),
1990 path_suffixes: vec!["ts".to_string()],
1991 ..Default::default()
1992 },
1993 Some(tree_sitter_typescript::language_typescript()),
1994 );
1995 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1996
1997 let fs = FakeFs::new(cx.background());
1998 fs.insert_tree(
1999 "/dir",
2000 json!({
2001 "a.ts": "",
2002 }),
2003 )
2004 .await;
2005
2006 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2007 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2008 let buffer = project
2009 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2010 .await
2011 .unwrap();
2012
2013 let fake_server = fake_language_servers.next().await.unwrap();
2014
2015 let text = "let a = b.fqn";
2016 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2017 let completions = project.update(cx, |project, cx| {
2018 project.completions(&buffer, text.len(), cx)
2019 });
2020
2021 fake_server
2022 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2023 Ok(Some(lsp::CompletionResponse::Array(vec![
2024 lsp::CompletionItem {
2025 label: "fullyQualifiedName?".into(),
2026 insert_text: Some("fully\rQualified\r\nName".into()),
2027 ..Default::default()
2028 },
2029 ])))
2030 })
2031 .next()
2032 .await;
2033 let completions = completions.await.unwrap();
2034 assert_eq!(completions.len(), 1);
2035 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2036}
2037
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Verifies the command-based code-action flow: the action carries no
    // edits, only a command; executing the command makes the server send the
    // editor an `applyEdit` request, and those edits end up in the returned
    // project transaction.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one that carries a command).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-to-client request: ask the editor to prepend "X"
                    // to a.ts via a workspace edit.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request, and the
    // transaction is undoable as a single unit.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2153
2154#[gpui::test]
2155async fn test_save_file(cx: &mut gpui::TestAppContext) {
2156 let fs = FakeFs::new(cx.background());
2157 fs.insert_tree(
2158 "/dir",
2159 json!({
2160 "file1": "the old contents",
2161 }),
2162 )
2163 .await;
2164
2165 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2166 let buffer = project
2167 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2168 .await
2169 .unwrap();
2170 buffer.update(cx, |buffer, cx| {
2171 assert_eq!(buffer.text(), "the old contents");
2172 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2173 });
2174
2175 project
2176 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2177 .await
2178 .unwrap();
2179
2180 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2181 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2182}
2183
2184#[gpui::test]
2185async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2186 let fs = FakeFs::new(cx.background());
2187 fs.insert_tree(
2188 "/dir",
2189 json!({
2190 "file1": "the old contents",
2191 }),
2192 )
2193 .await;
2194
2195 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2196 let buffer = project
2197 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2198 .await
2199 .unwrap();
2200 buffer.update(cx, |buffer, cx| {
2201 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2202 });
2203
2204 project
2205 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2206 .await
2207 .unwrap();
2208
2209 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2210 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2211}
2212
2213#[gpui::test]
2214async fn test_save_as(cx: &mut gpui::TestAppContext) {
2215 let fs = FakeFs::new(cx.background());
2216 fs.insert_tree("/dir", json!({})).await;
2217
2218 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2219
2220 let languages = project.read_with(cx, |project, _| project.languages().clone());
2221 languages.register(
2222 "/some/path",
2223 LanguageConfig {
2224 name: "Rust".into(),
2225 path_suffixes: vec!["rs".into()],
2226 ..Default::default()
2227 },
2228 tree_sitter_rust::language(),
2229 None,
2230 |_| Default::default(),
2231 );
2232
2233 let buffer = project.update(cx, |project, cx| {
2234 project.create_buffer("", None, cx).unwrap()
2235 });
2236 buffer.update(cx, |buffer, cx| {
2237 buffer.edit([(0..0, "abc")], None, cx);
2238 assert!(buffer.is_dirty());
2239 assert!(!buffer.has_conflict());
2240 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2241 });
2242 project
2243 .update(cx, |project, cx| {
2244 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2245 })
2246 .await
2247 .unwrap();
2248 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2249
2250 cx.foreground().run_until_parked();
2251 buffer.read_with(cx, |buffer, cx| {
2252 assert_eq!(
2253 buffer.file().unwrap().full_path(cx),
2254 Path::new("dir/file1.rs")
2255 );
2256 assert!(!buffer.is_dirty());
2257 assert!(!buffer.has_conflict());
2258 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2259 });
2260
2261 let opened_buffer = project
2262 .update(cx, |project, cx| {
2263 project.open_local_buffer("/dir/file1.rs", cx)
2264 })
2265 .await
2266 .unwrap();
2267 assert_eq!(opened_buffer, buffer);
2268}
2269
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that FS rescans keep entry ids and open-buffer paths stable
    // across renames/deletions, and that a remote replica of the worktree
    // converges to the same state after receiving an incremental update.
    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.read_with(cx, |p, _| p.client.clone());

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: the worktree entry id currently assigned to `path`.
    let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    let remote = cx.update(|cx| {
        Worktree::remote(
            1,
            1,
            proto::WorktreeMetadata {
                id: initial_snapshot.id().to_proto(),
                root_name: initial_snapshot.root_name().into(),
                abs_path: initial_snapshot
                    .abs_path()
                    .as_os_str()
                    .to_string_lossy()
                    .into(),
                visible: true,
            },
            rpc.clone(),
            cx,
        )
    });
    // Seed the replica with the full initial state of the local worktree.
    remote.update(cx, |remote, _| {
        let update = initial_snapshot.build_initial_update(1);
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();

    // No buffer has been edited yet, so none are dirty.
    cx.read(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.read(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );

        // Entry ids are preserved across renames, even across directories.
        assert_eq!(id_for_path("a/file2.new", cx), file2_id);
        assert_eq!(id_for_path("d/file3", cx), file3_id);
        assert_eq!(id_for_path("d/file4", cx), file4_id);

        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(app).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while the deleted file's buffer keeps its last-known path.
        assert_eq!(
            buffer5.read(app).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(app).file().unwrap().is_deleted());
        assert!(!buffer3.read(app).file().unwrap().is_deleted());
        assert!(!buffer4.read(app).file().unwrap().is_deleted());
        assert!(buffer5.read(app).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    remote.update(cx, |remote, cx| {
        let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
            &initial_snapshot,
            1,
            1,
            true,
        );
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();
    remote.read_with(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2425
2426#[gpui::test(iterations = 10)]
2427async fn test_buffer_identity_across_renames(
2428 deterministic: Arc<Deterministic>,
2429 cx: &mut gpui::TestAppContext,
2430) {
2431 let fs = FakeFs::new(cx.background());
2432 fs.insert_tree(
2433 "/dir",
2434 json!({
2435 "a": {
2436 "file1": "",
2437 }
2438 }),
2439 )
2440 .await;
2441
2442 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2443 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2444 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2445
2446 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2447 project.read_with(cx, |project, cx| {
2448 let tree = project.worktrees(cx).next().unwrap();
2449 tree.read(cx)
2450 .entry_for_path(path)
2451 .unwrap_or_else(|| panic!("no entry for path {}", path))
2452 .id
2453 })
2454 };
2455
2456 let dir_id = id_for_path("a", cx);
2457 let file_id = id_for_path("a/file1", cx);
2458 let buffer = project
2459 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2460 .await
2461 .unwrap();
2462 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2463
2464 project
2465 .update(cx, |project, cx| {
2466 project.rename_entry(dir_id, Path::new("b"), cx)
2467 })
2468 .unwrap()
2469 .await
2470 .unwrap();
2471 deterministic.run_until_parked();
2472 assert_eq!(id_for_path("b", cx), dir_id);
2473 assert_eq!(id_for_path("b/file1", cx), file_id);
2474 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2475}
2476
2477#[gpui::test]
2478async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2479 let fs = FakeFs::new(cx.background());
2480 fs.insert_tree(
2481 "/dir",
2482 json!({
2483 "a.txt": "a-contents",
2484 "b.txt": "b-contents",
2485 }),
2486 )
2487 .await;
2488
2489 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2490
2491 // Spawn multiple tasks to open paths, repeating some paths.
2492 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2493 (
2494 p.open_local_buffer("/dir/a.txt", cx),
2495 p.open_local_buffer("/dir/b.txt", cx),
2496 p.open_local_buffer("/dir/a.txt", cx),
2497 )
2498 });
2499
2500 let buffer_a_1 = buffer_a_1.await.unwrap();
2501 let buffer_a_2 = buffer_a_2.await.unwrap();
2502 let buffer_b = buffer_b.await.unwrap();
2503 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2504 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2505
2506 // There is only one buffer per path.
2507 let buffer_a_id = buffer_a_1.id();
2508 assert_eq!(buffer_a_2.id(), buffer_a_id);
2509
2510 // Open the same path again while it is still open.
2511 drop(buffer_a_1);
2512 let buffer_a_3 = project
2513 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2514 .await
2515 .unwrap();
2516
2517 // There's still only one buffer per path.
2518 assert_eq!(buffer_a_3.id(), buffer_a_id);
2519}
2520
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer's dirty/saved state machine and the exact sequence
    // of events emitted for edits, saves, undo-like restores, and on-disk
    // deletions.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Rc::new(RefCell::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every buffer event except Operation, which is noise here.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.borrow_mut().clear();
        // Simulate a save by acknowledging the current version, fingerprint,
        // and file mtime.
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first post-save edit flips the dirty bit, so the second
        // edit contributes an Edited event but no DirtyChanged.
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.borrow(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.borrow(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file on disk.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.borrow_mut().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}
2664
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers both reactions to on-disk changes: auto-reload (with anchor
    // preservation) when the buffer is clean, and conflict detection when the
    // buffer has unsaved edits.
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, to check
    // later that anchors survive the reload.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.foreground().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
2743
2744#[gpui::test]
2745async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2746 let fs = FakeFs::new(cx.background());
2747 fs.insert_tree(
2748 "/dir",
2749 json!({
2750 "file1": "a\nb\nc\n",
2751 "file2": "one\r\ntwo\r\nthree\r\n",
2752 }),
2753 )
2754 .await;
2755
2756 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2757 let buffer1 = project
2758 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2759 .await
2760 .unwrap();
2761 let buffer2 = project
2762 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2763 .await
2764 .unwrap();
2765
2766 buffer1.read_with(cx, |buffer, _| {
2767 assert_eq!(buffer.text(), "a\nb\nc\n");
2768 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2769 });
2770 buffer2.read_with(cx, |buffer, _| {
2771 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2772 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2773 });
2774
2775 // Change a file's line endings on disk from unix to windows. The buffer's
2776 // state updates correctly.
2777 fs.save(
2778 "/dir/file1".as_ref(),
2779 &"aaa\nb\nc\n".into(),
2780 LineEnding::Windows,
2781 )
2782 .await
2783 .unwrap();
2784 cx.foreground().run_until_parked();
2785 buffer1.read_with(cx, |buffer, _| {
2786 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2787 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2788 });
2789
2790 // Save a file with windows line endings. The file is written correctly.
2791 buffer2.update(cx, |buffer, cx| {
2792 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2793 });
2794 project
2795 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
2796 .await
2797 .unwrap();
2798 assert_eq!(
2799 fs.load("/dir/file2".as_ref()).await.unwrap(),
2800 "one\r\ntwo\r\nthree\r\nfour\r\n",
2801 );
2802}
2803
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Checks that LSP diagnostics related via `related_information` are
    // grouped: hints that point back at a primary diagnostic share its
    // group id, and groups can be queried individually.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two primary diagnostics ("error 1", "error 2") plus standalone HINT
    // diagnostics that mirror their related information and point back at
    // the primary via an "original diagnostic" reference.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // Diagnostics come back in buffer order; each entry carries the group id
    // of its primary and an is_primary flag.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" together with its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" with its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3043
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the two-step LSP rename flow against a fake language server:
    // prepare_rename resolves the renameable range, perform_rename applies a
    // multi-file WorkspaceEdit and returns the edited buffers.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // The fake server must advertise rename support with prepare_provider,
    // otherwise prepare_rename would not be sent.
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the range of the symbol.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer (including two.rs, which was
    // opened on demand) to its applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .read_with(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .read_with(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3181
3182#[gpui::test]
3183async fn test_search(cx: &mut gpui::TestAppContext) {
3184 let fs = FakeFs::new(cx.background());
3185 fs.insert_tree(
3186 "/dir",
3187 json!({
3188 "one.rs": "const ONE: usize = 1;",
3189 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3190 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3191 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3192 }),
3193 )
3194 .await;
3195 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3196 assert_eq!(
3197 search(&project, SearchQuery::text("TWO", false, true), cx)
3198 .await
3199 .unwrap(),
3200 HashMap::from_iter([
3201 ("two.rs".to_string(), vec![6..9]),
3202 ("three.rs".to_string(), vec![37..40])
3203 ])
3204 );
3205
3206 let buffer_4 = project
3207 .update(cx, |project, cx| {
3208 project.open_local_buffer("/dir/four.rs", cx)
3209 })
3210 .await
3211 .unwrap();
3212 buffer_4.update(cx, |buffer, cx| {
3213 let text = "two::TWO";
3214 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3215 });
3216
3217 assert_eq!(
3218 search(&project, SearchQuery::text("TWO", false, true), cx)
3219 .await
3220 .unwrap(),
3221 HashMap::from_iter([
3222 ("two.rs".to_string(), vec![6..9]),
3223 ("three.rs".to_string(), vec![37..40]),
3224 ("four.rs".to_string(), vec![25..28, 36..39])
3225 ])
3226 );
3227
3228 async fn search(
3229 project: &ModelHandle<Project>,
3230 query: SearchQuery,
3231 cx: &mut gpui::TestAppContext,
3232 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3233 let results = project
3234 .update(cx, |project, cx| project.search(query, cx))
3235 .await?;
3236
3237 Ok(results
3238 .into_iter()
3239 .map(|(buffer, ranges)| {
3240 buffer.read_with(cx, |buffer, _| {
3241 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3242 let ranges = ranges
3243 .into_iter()
3244 .map(|range| range.to_offset(buffer))
3245 .collect::<Vec<_>>();
3246 (path, ranges)
3247 })
3248 })
3249 .collect())
3250 }
3251}