1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::AppContext;
6use gpui::{executor::Deterministic, test::subscribe};
7use language::{
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
9 OffsetRangeExt, Point, ToPoint,
10};
11use lsp::Url;
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
16use unindent::Unindent as _;
17use util::{assert_set_eq, test::temp_tree};
18
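// Runs once when the test binary loads (via `#[ctor]`), enabling `env_logger`
// output for these tests when the `RUST_LOG` environment variable is set,
// e.g. `RUST_LOG=info cargo test`.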
19#[cfg(test)]
20#[ctor::ctor]
21fn init_logger() {
22 if std::env::var("RUST_LOG").is_ok() {
23 env_logger::init();
24 }
25}
26
27#[gpui::test]
28async fn test_symlinks(cx: &mut gpui::TestAppContext) {
29 let dir = temp_tree(json!({
30 "root": {
31 "apple": "",
32 "banana": {
33 "carrot": {
34 "date": "",
35 "endive": "",
36 }
37 },
38 "fennel": {
39 "grape": "",
40 }
41 }
42 }));
43
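    // Create a symlink to the root directory, plus a symlink inside the tree
    // ("finnochio") that points at its sibling directory ("fennel").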
44 let root_link_path = dir.path().join("root_link");
45 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
46 unix::fs::symlink(
47 &dir.path().join("root/fennel"),
48 &dir.path().join("root/finnochio"),
49 )
50 .unwrap();
51
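    // Open the project through the symlinked root and verify that both paths to
    // the symlinked directory resolve to the same underlying inode.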
52 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
53 project.read_with(cx, |project, cx| {
54 let tree = project.worktrees(cx).next().unwrap().read(cx);
55 assert_eq!(tree.file_count(), 5);
56 assert_eq!(
57 tree.inode_for_path("fennel/grape"),
58 tree.inode_for_path("finnochio/grape")
59 );
60 });
61}
62
63#[gpui::test]
64async fn test_managing_language_servers(
65 deterministic: Arc<Deterministic>,
66 cx: &mut gpui::TestAppContext,
67) {
68 cx.foreground().forbid_parking();
69
70 let mut rust_language = Language::new(
71 LanguageConfig {
72 name: "Rust".into(),
73 path_suffixes: vec!["rs".to_string()],
74 ..Default::default()
75 },
76 Some(tree_sitter_rust::language()),
77 );
78 let mut json_language = Language::new(
79 LanguageConfig {
80 name: "JSON".into(),
81 path_suffixes: vec!["json".to_string()],
82 ..Default::default()
83 },
84 None,
85 );
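    // Attach fake LSP adapters to both languages; the returned streams yield a
    // handle to each fake server as it starts, so the test can observe the LSP
    // traffic the project generates.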
86 let mut fake_rust_servers = rust_language
87 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
88 name: "the-rust-language-server",
89 capabilities: lsp::ServerCapabilities {
90 completion_provider: Some(lsp::CompletionOptions {
91 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
92 ..Default::default()
93 }),
94 ..Default::default()
95 },
96 ..Default::default()
97 }))
98 .await;
99 let mut fake_json_servers = json_language
100 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
101 name: "the-json-language-server",
102 capabilities: lsp::ServerCapabilities {
103 completion_provider: Some(lsp::CompletionOptions {
104 trigger_characters: Some(vec![":".to_string()]),
105 ..Default::default()
106 }),
107 ..Default::default()
108 },
109 ..Default::default()
110 }))
111 .await;
112
113 let fs = FakeFs::new(cx.background());
114 fs.insert_tree(
115 "/the-root",
116 json!({
117 "test.rs": "const A: i32 = 1;",
118 "test2.rs": "",
119 "Cargo.toml": "a = 1",
120 "package.json": "{\"a\": 1}",
121 }),
122 )
123 .await;
124
125 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
126
127 // Open a buffer without an associated language server.
128 let toml_buffer = project
129 .update(cx, |project, cx| {
130 project.open_local_buffer("/the-root/Cargo.toml", cx)
131 })
132 .await
133 .unwrap();
134
135 // Open a buffer with an associated language server before the language for it has been loaded.
136 let rust_buffer = project
137 .update(cx, |project, cx| {
138 project.open_local_buffer("/the-root/test.rs", cx)
139 })
140 .await
141 .unwrap();
142 rust_buffer.read_with(cx, |buffer, _| {
143 assert_eq!(buffer.language().map(|l| l.name()), None);
144 });
145
146 // Now we add the languages to the project, and ensure they get assigned to all
147 // the relevant open buffers.
148 project.update(cx, |project, _| {
149 project.languages.add(Arc::new(json_language));
150 project.languages.add(Arc::new(rust_language));
151 });
152 deterministic.run_until_parked();
153 rust_buffer.read_with(cx, |buffer, _| {
154 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
155 });
156
157 // A server is started up, and it is notified about Rust files.
158 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
159 assert_eq!(
160 fake_rust_server
161 .receive_notification::<lsp::notification::DidOpenTextDocument>()
162 .await
163 .text_document,
164 lsp::TextDocumentItem {
165 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
166 version: 0,
167 text: "const A: i32 = 1;".to_string(),
168 language_id: Default::default()
169 }
170 );
171
172 // The buffer is configured based on the language server's capabilities.
173 rust_buffer.read_with(cx, |buffer, _| {
174 assert_eq!(
175 buffer.completion_triggers(),
176 &[".".to_string(), "::".to_string()]
177 );
178 });
179 toml_buffer.read_with(cx, |buffer, _| {
180 assert!(buffer.completion_triggers().is_empty());
181 });
182
183 // Edit a buffer. The changes are reported to the language server.
184 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
185 assert_eq!(
186 fake_rust_server
187 .receive_notification::<lsp::notification::DidChangeTextDocument>()
188 .await
189 .text_document,
190 lsp::VersionedTextDocumentIdentifier::new(
191 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
192 1
193 )
194 );
195
196 // Open a third buffer with a different associated language server.
197 let json_buffer = project
198 .update(cx, |project, cx| {
199 project.open_local_buffer("/the-root/package.json", cx)
200 })
201 .await
202 .unwrap();
203
204 // A json language server is started up and is only notified about the json buffer.
205 let mut fake_json_server = fake_json_servers.next().await.unwrap();
206 assert_eq!(
207 fake_json_server
208 .receive_notification::<lsp::notification::DidOpenTextDocument>()
209 .await
210 .text_document,
211 lsp::TextDocumentItem {
212 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
213 version: 0,
214 text: "{\"a\": 1}".to_string(),
215 language_id: Default::default()
216 }
217 );
218
219 // This buffer is configured based on the second language server's
220 // capabilities.
221 json_buffer.read_with(cx, |buffer, _| {
222 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
223 });
224
225 // When opening another buffer whose language server is already running,
226 // it is also configured based on the existing language server's capabilities.
227 let rust_buffer2 = project
228 .update(cx, |project, cx| {
229 project.open_local_buffer("/the-root/test2.rs", cx)
230 })
231 .await
232 .unwrap();
233 rust_buffer2.read_with(cx, |buffer, _| {
234 assert_eq!(
235 buffer.completion_triggers(),
236 &[".".to_string(), "::".to_string()]
237 );
238 });
239
240 // Changes are reported only to servers matching the buffer's language.
241 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
242 rust_buffer2.update(cx, |buffer, cx| {
243 buffer.edit([(0..0, "let x = 1;")], None, cx)
244 });
245 assert_eq!(
246 fake_rust_server
247 .receive_notification::<lsp::notification::DidChangeTextDocument>()
248 .await
249 .text_document,
250 lsp::VersionedTextDocumentIdentifier::new(
251 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
252 1
253 )
254 );
255
256 // Save notifications are reported to all servers.
257 project
258 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
259 .await
260 .unwrap();
261 assert_eq!(
262 fake_rust_server
263 .receive_notification::<lsp::notification::DidSaveTextDocument>()
264 .await
265 .text_document,
266 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
267 );
268 assert_eq!(
269 fake_json_server
270 .receive_notification::<lsp::notification::DidSaveTextDocument>()
271 .await
272 .text_document,
273 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
274 );
275
276 // Renames are reported only to servers matching the buffer's language.
277 fs.rename(
278 Path::new("/the-root/test2.rs"),
279 Path::new("/the-root/test3.rs"),
280 Default::default(),
281 )
282 .await
283 .unwrap();
284 assert_eq!(
285 fake_rust_server
286 .receive_notification::<lsp::notification::DidCloseTextDocument>()
287 .await
288 .text_document,
289 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
290 );
291 assert_eq!(
292 fake_rust_server
293 .receive_notification::<lsp::notification::DidOpenTextDocument>()
294 .await
295 .text_document,
296 lsp::TextDocumentItem {
297 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
298 version: 0,
299 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
300 language_id: Default::default()
301 },
302 );
303
304 rust_buffer2.update(cx, |buffer, cx| {
305 buffer.update_diagnostics(
306 LanguageServerId(0),
307 DiagnosticSet::from_sorted_entries(
308 vec![DiagnosticEntry {
309 diagnostic: Default::default(),
310 range: Anchor::MIN..Anchor::MAX,
311 }],
312 &buffer.snapshot(),
313 ),
314 cx,
315 );
316 assert_eq!(
317 buffer
318 .snapshot()
319 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
320 .count(),
321 1
322 );
323 });
324
325 // When the rename changes the extension of the file, the buffer gets closed on the old
326 // language server and gets opened on the new one.
327 fs.rename(
328 Path::new("/the-root/test3.rs"),
329 Path::new("/the-root/test3.json"),
330 Default::default(),
331 )
332 .await
333 .unwrap();
334 assert_eq!(
335 fake_rust_server
336 .receive_notification::<lsp::notification::DidCloseTextDocument>()
337 .await
338 .text_document,
339 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
340 );
341 assert_eq!(
342 fake_json_server
343 .receive_notification::<lsp::notification::DidOpenTextDocument>()
344 .await
345 .text_document,
346 lsp::TextDocumentItem {
347 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
348 version: 0,
349 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
350 language_id: Default::default()
351 },
352 );
353
354 // We clear the diagnostics, since the language has changed.
355 rust_buffer2.read_with(cx, |buffer, _| {
356 assert_eq!(
357 buffer
358 .snapshot()
359 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
360 .count(),
361 0
362 );
363 });
364
    // The renamed file's version resets after changing language servers.
366 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
367 assert_eq!(
368 fake_json_server
369 .receive_notification::<lsp::notification::DidChangeTextDocument>()
370 .await
371 .text_document,
372 lsp::VersionedTextDocumentIdentifier::new(
373 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
374 1
375 )
376 );
377
378 // Restart language servers
379 project.update(cx, |project, cx| {
380 project.restart_language_servers_for_buffers(
381 vec![rust_buffer.clone(), json_buffer.clone()],
382 cx,
383 );
384 });
385
386 let mut rust_shutdown_requests = fake_rust_server
387 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
388 let mut json_shutdown_requests = fake_json_server
389 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
390 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
391
392 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
393 let mut fake_json_server = fake_json_servers.next().await.unwrap();
394
    // Ensure the Rust document is reopened in the new Rust language server
396 assert_eq!(
397 fake_rust_server
398 .receive_notification::<lsp::notification::DidOpenTextDocument>()
399 .await
400 .text_document,
401 lsp::TextDocumentItem {
402 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
403 version: 0,
404 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
405 language_id: Default::default()
406 }
407 );
408
    // Ensure the JSON documents are reopened in the new JSON language server
410 assert_set_eq!(
411 [
412 fake_json_server
413 .receive_notification::<lsp::notification::DidOpenTextDocument>()
414 .await
415 .text_document,
416 fake_json_server
417 .receive_notification::<lsp::notification::DidOpenTextDocument>()
418 .await
419 .text_document,
420 ],
421 [
422 lsp::TextDocumentItem {
423 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
424 version: 0,
425 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
426 language_id: Default::default()
427 },
428 lsp::TextDocumentItem {
429 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
430 version: 0,
431 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
432 language_id: Default::default()
433 }
434 ]
435 );
436
437 // Close notifications are reported only to servers matching the buffer's language.
438 cx.update(|_| drop(json_buffer));
439 let close_message = lsp::DidCloseTextDocumentParams {
440 text_document: lsp::TextDocumentIdentifier::new(
441 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
442 ),
443 };
444 assert_eq!(
445 fake_json_server
446 .receive_notification::<lsp::notification::DidCloseTextDocument>()
447 .await,
448 close_message,
449 );
450}
451
452#[gpui::test]
453async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
454 cx.foreground().forbid_parking();
455
456 let mut language = Language::new(
457 LanguageConfig {
458 name: "Rust".into(),
459 path_suffixes: vec!["rs".to_string()],
460 ..Default::default()
461 },
462 Some(tree_sitter_rust::language()),
463 );
464 let mut fake_servers = language
465 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
466 name: "the-language-server",
467 ..Default::default()
468 }))
469 .await;
470
471 let fs = FakeFs::new(cx.background());
472 fs.insert_tree(
473 "/the-root",
474 json!({
475 "a.rs": "",
476 "b.rs": "",
477 }),
478 )
479 .await;
480
481 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
482 project.update(cx, |project, _| {
483 project.languages.add(Arc::new(language));
484 });
485 cx.foreground().run_until_parked();
486
487 // Start the language server by opening a buffer with a compatible file extension.
488 let _buffer = project
489 .update(cx, |project, cx| {
490 project.open_local_buffer("/the-root/a.rs", cx)
491 })
492 .await
493 .unwrap();
494
495 // Keep track of the FS events reported to the language server.
496 let fake_server = fake_servers.next().await.unwrap();
497 let file_changes = Arc::new(Mutex::new(Vec::new()));
498 fake_server
499 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
500 registrations: vec![lsp::Registration {
501 id: Default::default(),
502 method: "workspace/didChangeWatchedFiles".to_string(),
503 register_options: serde_json::to_value(
504 lsp::DidChangeWatchedFilesRegistrationOptions {
505 watchers: vec![lsp::FileSystemWatcher {
506 glob_pattern: "*.{rs,c}".to_string(),
507 kind: None,
508 }],
509 },
510 )
511 .ok(),
512 }],
513 })
514 .await
515 .unwrap();
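    // Record every DidChangeWatchedFiles notification, keeping the list sorted by
    // URI so the assertions below don't depend on event ordering.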
516 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
517 let file_changes = file_changes.clone();
518 move |params, _| {
519 let mut file_changes = file_changes.lock();
520 file_changes.extend(params.changes);
521 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
522 }
523 });
524
525 cx.foreground().run_until_parked();
526 assert_eq!(file_changes.lock().len(), 0);
527
528 // Perform some file system mutations, two of which match the watched patterns,
529 // and one of which does not.
530 fs.create_file("/the-root/c.rs".as_ref(), Default::default())
531 .await
532 .unwrap();
533 fs.create_file("/the-root/d.txt".as_ref(), Default::default())
534 .await
535 .unwrap();
536 fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
537 .await
538 .unwrap();
539
540 // The language server receives events for the FS mutations that match its watch patterns.
541 cx.foreground().run_until_parked();
542 assert_eq!(
543 &*file_changes.lock(),
544 &[
545 lsp::FileEvent {
546 uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
547 typ: lsp::FileChangeType::DELETED,
548 },
549 lsp::FileEvent {
550 uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
551 typ: lsp::FileChangeType::CREATED,
552 },
553 ]
554 );
555}
556
557#[gpui::test]
558async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
559 cx.foreground().forbid_parking();
560
561 let fs = FakeFs::new(cx.background());
562 fs.insert_tree(
563 "/dir",
564 json!({
565 "a.rs": "let a = 1;",
566 "b.rs": "let b = 2;"
567 }),
568 )
569 .await;
570
571 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
572
573 let buffer_a = project
574 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
575 .await
576 .unwrap();
577 let buffer_b = project
578 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
579 .await
580 .unwrap();
581
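    // Publish an error for the `a.rs` worktree and a warning for the `b.rs` worktree.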
582 project.update(cx, |project, cx| {
583 project
584 .update_diagnostics(
585 LanguageServerId(0),
586 lsp::PublishDiagnosticsParams {
587 uri: Url::from_file_path("/dir/a.rs").unwrap(),
588 version: None,
589 diagnostics: vec![lsp::Diagnostic {
590 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
591 severity: Some(lsp::DiagnosticSeverity::ERROR),
592 message: "error 1".to_string(),
593 ..Default::default()
594 }],
595 },
596 &[],
597 cx,
598 )
599 .unwrap();
600 project
601 .update_diagnostics(
602 LanguageServerId(0),
603 lsp::PublishDiagnosticsParams {
604 uri: Url::from_file_path("/dir/b.rs").unwrap(),
605 version: None,
606 diagnostics: vec![lsp::Diagnostic {
607 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
608 severity: Some(lsp::DiagnosticSeverity::WARNING),
609 message: "error 2".to_string(),
610 ..Default::default()
611 }],
612 },
613 &[],
614 cx,
615 )
616 .unwrap();
617 });
618
619 buffer_a.read_with(cx, |buffer, _| {
620 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
621 assert_eq!(
622 chunks
623 .iter()
624 .map(|(s, d)| (s.as_str(), *d))
625 .collect::<Vec<_>>(),
626 &[
627 ("let ", None),
628 ("a", Some(DiagnosticSeverity::ERROR)),
629 (" = 1;", None),
630 ]
631 );
632 });
633 buffer_b.read_with(cx, |buffer, _| {
634 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
635 assert_eq!(
636 chunks
637 .iter()
638 .map(|(s, d)| (s.as_str(), *d))
639 .collect::<Vec<_>>(),
640 &[
641 ("let ", None),
642 ("b", Some(DiagnosticSeverity::WARNING)),
643 (" = 2;", None),
644 ]
645 );
646 });
647}
648
649#[gpui::test]
650async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
651 cx.foreground().forbid_parking();
652
653 let fs = FakeFs::new(cx.background());
654 fs.insert_tree(
655 "/root",
656 json!({
657 "dir": {
658 "a.rs": "let a = 1;",
659 },
660 "other.rs": "let b = c;"
661 }),
662 )
663 .await;
664
665 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
666
667 let (worktree, _) = project
668 .update(cx, |project, cx| {
669 project.find_or_create_local_worktree("/root/other.rs", false, cx)
670 })
671 .await
672 .unwrap();
673 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
674
675 project.update(cx, |project, cx| {
676 project
677 .update_diagnostics(
678 LanguageServerId(0),
679 lsp::PublishDiagnosticsParams {
680 uri: Url::from_file_path("/root/other.rs").unwrap(),
681 version: None,
682 diagnostics: vec![lsp::Diagnostic {
683 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
684 severity: Some(lsp::DiagnosticSeverity::ERROR),
685 message: "unknown variable 'c'".to_string(),
686 ..Default::default()
687 }],
688 },
689 &[],
690 cx,
691 )
692 .unwrap();
693 });
694
695 let buffer = project
696 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
697 .await
698 .unwrap();
699 buffer.read_with(cx, |buffer, _| {
700 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
701 assert_eq!(
702 chunks
703 .iter()
704 .map(|(s, d)| (s.as_str(), *d))
705 .collect::<Vec<_>>(),
706 &[
707 ("let b = ", None),
708 ("c", Some(DiagnosticSeverity::ERROR)),
709 (";", None),
710 ]
711 );
712 });
713
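    // Diagnostics from the hidden (non-visible) worktree are excluded from the
    // project's diagnostic summaries.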
714 project.read_with(cx, |project, cx| {
715 assert_eq!(project.diagnostic_summaries(cx).next(), None);
716 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
717 });
718}
719
720#[gpui::test]
721async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
722 cx.foreground().forbid_parking();
723
724 let progress_token = "the-progress-token";
725 let mut language = Language::new(
726 LanguageConfig {
727 name: "Rust".into(),
728 path_suffixes: vec!["rs".to_string()],
729 ..Default::default()
730 },
731 Some(tree_sitter_rust::language()),
732 );
733 let mut fake_servers = language
734 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
735 disk_based_diagnostics_progress_token: Some(progress_token.into()),
736 disk_based_diagnostics_sources: vec!["disk".into()],
737 ..Default::default()
738 }))
739 .await;
740
741 let fs = FakeFs::new(cx.background());
742 fs.insert_tree(
743 "/dir",
744 json!({
745 "a.rs": "fn a() { A }",
746 "b.rs": "const y: i32 = 1",
747 }),
748 )
749 .await;
750
751 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
752 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
753 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
754
    // Cause the worktree to start the fake language server
756 let _buffer = project
757 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
758 .await
759 .unwrap();
760
761 let mut events = subscribe(&project, cx);
762
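    // Simulate a disk-based diagnostics run: the fake server reports progress using
    // the adapter's configured progress token.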
763 let fake_server = fake_servers.next().await.unwrap();
764 fake_server
765 .start_progress(format!("{}/0", progress_token))
766 .await;
767 assert_eq!(
768 events.next().await.unwrap(),
769 Event::DiskBasedDiagnosticsStarted {
770 language_server_id: LanguageServerId(0),
771 }
772 );
773
774 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
775 uri: Url::from_file_path("/dir/a.rs").unwrap(),
776 version: None,
777 diagnostics: vec![lsp::Diagnostic {
778 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
779 severity: Some(lsp::DiagnosticSeverity::ERROR),
780 message: "undefined variable 'A'".to_string(),
781 ..Default::default()
782 }],
783 });
784 assert_eq!(
785 events.next().await.unwrap(),
786 Event::DiagnosticsUpdated {
787 language_server_id: LanguageServerId(0),
788 path: (worktree_id, Path::new("a.rs")).into()
789 }
790 );
791
792 fake_server.end_progress(format!("{}/0", progress_token));
793 assert_eq!(
794 events.next().await.unwrap(),
795 Event::DiskBasedDiagnosticsFinished {
796 language_server_id: LanguageServerId(0)
797 }
798 );
799
800 let buffer = project
801 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
802 .await
803 .unwrap();
804
805 buffer.read_with(cx, |buffer, _| {
806 let snapshot = buffer.snapshot();
807 let diagnostics = snapshot
808 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
809 .collect::<Vec<_>>();
810 assert_eq!(
811 diagnostics,
812 &[DiagnosticEntry {
813 range: Point::new(0, 9)..Point::new(0, 10),
814 diagnostic: Diagnostic {
815 severity: lsp::DiagnosticSeverity::ERROR,
816 message: "undefined variable 'A'".to_string(),
817 group_id: 0,
818 is_primary: true,
819 ..Default::default()
820 }
821 }]
822 )
823 });
824
825 // Ensure publishing empty diagnostics twice only results in one update event.
826 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
827 uri: Url::from_file_path("/dir/a.rs").unwrap(),
828 version: None,
829 diagnostics: Default::default(),
830 });
831 assert_eq!(
832 events.next().await.unwrap(),
833 Event::DiagnosticsUpdated {
834 language_server_id: LanguageServerId(0),
835 path: (worktree_id, Path::new("a.rs")).into()
836 }
837 );
838
839 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
840 uri: Url::from_file_path("/dir/a.rs").unwrap(),
841 version: None,
842 diagnostics: Default::default(),
843 });
844 cx.foreground().run_until_parked();
845 assert_eq!(futures::poll!(events.next()), Poll::Pending);
846}
847
848#[gpui::test]
849async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
850 cx.foreground().forbid_parking();
851
852 let progress_token = "the-progress-token";
853 let mut language = Language::new(
854 LanguageConfig {
855 path_suffixes: vec!["rs".to_string()],
856 ..Default::default()
857 },
858 None,
859 );
860 let mut fake_servers = language
861 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
862 disk_based_diagnostics_sources: vec!["disk".into()],
863 disk_based_diagnostics_progress_token: Some(progress_token.into()),
864 ..Default::default()
865 }))
866 .await;
867
868 let fs = FakeFs::new(cx.background());
869 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
870
871 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
872 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
873
874 let buffer = project
875 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
876 .await
877 .unwrap();
878
879 // Simulate diagnostics starting to update.
880 let fake_server = fake_servers.next().await.unwrap();
881 fake_server.start_progress(progress_token).await;
882
883 // Restart the server before the diagnostics finish updating.
884 project.update(cx, |project, cx| {
885 project.restart_language_servers_for_buffers([buffer], cx);
886 });
887 let mut events = subscribe(&project, cx);
888
889 // Simulate the newly started server sending more diagnostics.
890 let fake_server = fake_servers.next().await.unwrap();
891 fake_server.start_progress(progress_token).await;
892 assert_eq!(
893 events.next().await.unwrap(),
894 Event::DiskBasedDiagnosticsStarted {
895 language_server_id: LanguageServerId(1)
896 }
897 );
898 project.read_with(cx, |project, _| {
899 assert_eq!(
900 project
901 .language_servers_running_disk_based_diagnostics()
902 .collect::<Vec<_>>(),
903 [LanguageServerId(1)]
904 );
905 });
906
907 // All diagnostics are considered done, despite the old server's diagnostic
908 // task never completing.
909 fake_server.end_progress(progress_token);
910 assert_eq!(
911 events.next().await.unwrap(),
912 Event::DiskBasedDiagnosticsFinished {
913 language_server_id: LanguageServerId(1)
914 }
915 );
916 project.read_with(cx, |project, _| {
917 assert_eq!(
918 project
919 .language_servers_running_disk_based_diagnostics()
920 .collect::<Vec<_>>(),
921 [LanguageServerId(0); 0]
922 );
923 });
924}
925
926#[gpui::test]
927async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
928 cx.foreground().forbid_parking();
929
930 let mut language = Language::new(
931 LanguageConfig {
932 path_suffixes: vec!["rs".to_string()],
933 ..Default::default()
934 },
935 None,
936 );
937 let mut fake_servers = language
938 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
939 name: "the-lsp",
940 ..Default::default()
941 }))
942 .await;
943
944 let fs = FakeFs::new(cx.background());
945 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
946
947 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
948 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
949
950 let buffer = project
951 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
952 .await
953 .unwrap();
954
955 // Before restarting the server, report diagnostics with an unknown buffer version.
956 let fake_server = fake_servers.next().await.unwrap();
957 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
958 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
959 version: Some(10000),
960 diagnostics: Vec::new(),
961 });
962 cx.foreground().run_until_parked();
963
964 project.update(cx, |project, cx| {
965 project.restart_language_servers_for_buffers([buffer.clone()], cx);
966 });
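    // The newly started server re-opens the buffer with its version reset to 0.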
967 let mut fake_server = fake_servers.next().await.unwrap();
968 let notification = fake_server
969 .receive_notification::<lsp::notification::DidOpenTextDocument>()
970 .await
971 .text_document;
972 assert_eq!(notification.version, 0);
973}
974
975#[gpui::test]
976async fn test_toggling_enable_language_server(
977 deterministic: Arc<Deterministic>,
978 cx: &mut gpui::TestAppContext,
979) {
980 deterministic.forbid_parking();
981
982 let mut rust = Language::new(
983 LanguageConfig {
984 name: Arc::from("Rust"),
985 path_suffixes: vec!["rs".to_string()],
986 ..Default::default()
987 },
988 None,
989 );
990 let mut fake_rust_servers = rust
991 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
992 name: "rust-lsp",
993 ..Default::default()
994 }))
995 .await;
996 let mut js = Language::new(
997 LanguageConfig {
998 name: Arc::from("JavaScript"),
999 path_suffixes: vec!["js".to_string()],
1000 ..Default::default()
1001 },
1002 None,
1003 );
1004 let mut fake_js_servers = js
1005 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1006 name: "js-lsp",
1007 ..Default::default()
1008 }))
1009 .await;
1010
1011 let fs = FakeFs::new(cx.background());
1012 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1013 .await;
1014
1015 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1016 project.update(cx, |project, _| {
1017 project.languages.add(Arc::new(rust));
1018 project.languages.add(Arc::new(js));
1019 });
1020
1021 let _rs_buffer = project
1022 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1023 .await
1024 .unwrap();
1025 let _js_buffer = project
1026 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1027 .await
1028 .unwrap();
1029
1030 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1031 assert_eq!(
1032 fake_rust_server_1
1033 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1034 .await
1035 .text_document
1036 .uri
1037 .as_str(),
1038 "file:///dir/a.rs"
1039 );
1040
1041 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1042 assert_eq!(
1043 fake_js_server
1044 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1045 .await
1046 .text_document
1047 .uri
1048 .as_str(),
1049 "file:///dir/b.js"
1050 );
1051
1052 // Disable Rust language server, ensuring only that server gets stopped.
1053 cx.update(|cx| {
1054 cx.update_global(|settings: &mut Settings, _| {
1055 settings.language_overrides.insert(
1056 Arc::from("Rust"),
1057 settings::EditorSettings {
1058 enable_language_server: Some(false),
1059 ..Default::default()
1060 },
1061 );
1062 })
1063 });
1064 fake_rust_server_1
1065 .receive_notification::<lsp::notification::Exit>()
1066 .await;
1067
1068 // Enable Rust and disable JavaScript language servers, ensuring that the
1069 // former gets started again and that the latter stops.
1070 cx.update(|cx| {
1071 cx.update_global(|settings: &mut Settings, _| {
1072 settings.language_overrides.insert(
1073 Arc::from("Rust"),
1074 settings::EditorSettings {
1075 enable_language_server: Some(true),
1076 ..Default::default()
1077 },
1078 );
1079 settings.language_overrides.insert(
1080 Arc::from("JavaScript"),
1081 settings::EditorSettings {
1082 enable_language_server: Some(false),
1083 ..Default::default()
1084 },
1085 );
1086 })
1087 });
1088 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1089 assert_eq!(
1090 fake_rust_server_2
1091 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1092 .await
1093 .text_document
1094 .uri
1095 .as_str(),
1096 "file:///dir/a.rs"
1097 );
1098 fake_js_server
1099 .receive_notification::<lsp::notification::Exit>()
1100 .await;
1101}
1102
1103#[gpui::test]
1104async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1105 cx.foreground().forbid_parking();
1106
1107 let mut language = Language::new(
1108 LanguageConfig {
1109 name: "Rust".into(),
1110 path_suffixes: vec!["rs".to_string()],
1111 ..Default::default()
1112 },
1113 Some(tree_sitter_rust::language()),
1114 );
1115 let mut fake_servers = language
1116 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1117 disk_based_diagnostics_sources: vec!["disk".into()],
1118 ..Default::default()
1119 }))
1120 .await;
1121
1122 let text = "
1123 fn a() { A }
1124 fn b() { BB }
1125 fn c() { CCC }
1126 "
1127 .unindent();
1128
1129 let fs = FakeFs::new(cx.background());
1130 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1131
1132 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1133 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1134
1135 let buffer = project
1136 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1137 .await
1138 .unwrap();
1139
1140 let mut fake_server = fake_servers.next().await.unwrap();
1141 let open_notification = fake_server
1142 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1143 .await;
1144
1145 // Edit the buffer, moving the content down
1146 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1147 let change_notification_1 = fake_server
1148 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1149 .await;
1150 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1151
1152 // Report some diagnostics for the initial version of the buffer
1153 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1154 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1155 version: Some(open_notification.text_document.version),
1156 diagnostics: vec![
1157 lsp::Diagnostic {
1158 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1159 severity: Some(DiagnosticSeverity::ERROR),
1160 message: "undefined variable 'A'".to_string(),
1161 source: Some("disk".to_string()),
1162 ..Default::default()
1163 },
1164 lsp::Diagnostic {
1165 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1166 severity: Some(DiagnosticSeverity::ERROR),
1167 message: "undefined variable 'BB'".to_string(),
1168 source: Some("disk".to_string()),
1169 ..Default::default()
1170 },
1171 lsp::Diagnostic {
1172 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1173 severity: Some(DiagnosticSeverity::ERROR),
1174 source: Some("disk".to_string()),
1175 message: "undefined variable 'CCC'".to_string(),
1176 ..Default::default()
1177 },
1178 ],
1179 });
1180
1181 // The diagnostics have moved down since they were created.
1182 buffer.next_notification(cx).await;
1183 buffer.read_with(cx, |buffer, _| {
1184 assert_eq!(
1185 buffer
1186 .snapshot()
1187 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1188 .collect::<Vec<_>>(),
1189 &[
1190 DiagnosticEntry {
1191 range: Point::new(3, 9)..Point::new(3, 11),
1192 diagnostic: Diagnostic {
1193 source: Some("disk".into()),
1194 severity: DiagnosticSeverity::ERROR,
1195 message: "undefined variable 'BB'".to_string(),
1196 is_disk_based: true,
1197 group_id: 1,
1198 is_primary: true,
1199 ..Default::default()
1200 },
1201 },
1202 DiagnosticEntry {
1203 range: Point::new(4, 9)..Point::new(4, 12),
1204 diagnostic: Diagnostic {
1205 source: Some("disk".into()),
1206 severity: DiagnosticSeverity::ERROR,
1207 message: "undefined variable 'CCC'".to_string(),
1208 is_disk_based: true,
1209 group_id: 2,
1210 is_primary: true,
1211 ..Default::default()
1212 }
1213 }
1214 ]
1215 );
1216 assert_eq!(
1217 chunks_with_diagnostics(buffer, 0..buffer.len()),
1218 [
1219 ("\n\nfn a() { ".to_string(), None),
1220 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1221 (" }\nfn b() { ".to_string(), None),
1222 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1223 (" }\nfn c() { ".to_string(), None),
1224 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1225 (" }\n".to_string(), None),
1226 ]
1227 );
1228 assert_eq!(
1229 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1230 [
1231 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1232 (" }\nfn c() { ".to_string(), None),
1233 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1234 ]
1235 );
1236 });
1237
1238 // Ensure overlapping diagnostics are highlighted correctly.
1239 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1240 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1241 version: Some(open_notification.text_document.version),
1242 diagnostics: vec![
1243 lsp::Diagnostic {
1244 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1245 severity: Some(DiagnosticSeverity::ERROR),
1246 message: "undefined variable 'A'".to_string(),
1247 source: Some("disk".to_string()),
1248 ..Default::default()
1249 },
1250 lsp::Diagnostic {
1251 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1252 severity: Some(DiagnosticSeverity::WARNING),
1253 message: "unreachable statement".to_string(),
1254 source: Some("disk".to_string()),
1255 ..Default::default()
1256 },
1257 ],
1258 });
1259
1260 buffer.next_notification(cx).await;
1261 buffer.read_with(cx, |buffer, _| {
1262 assert_eq!(
1263 buffer
1264 .snapshot()
1265 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1266 .collect::<Vec<_>>(),
1267 &[
1268 DiagnosticEntry {
1269 range: Point::new(2, 9)..Point::new(2, 12),
1270 diagnostic: Diagnostic {
1271 source: Some("disk".into()),
1272 severity: DiagnosticSeverity::WARNING,
1273 message: "unreachable statement".to_string(),
1274 is_disk_based: true,
1275 group_id: 4,
1276 is_primary: true,
1277 ..Default::default()
1278 }
1279 },
1280 DiagnosticEntry {
1281 range: Point::new(2, 9)..Point::new(2, 10),
1282 diagnostic: Diagnostic {
1283 source: Some("disk".into()),
1284 severity: DiagnosticSeverity::ERROR,
1285 message: "undefined variable 'A'".to_string(),
1286 is_disk_based: true,
1287 group_id: 3,
1288 is_primary: true,
1289 ..Default::default()
1290 },
1291 }
1292 ]
1293 );
1294 assert_eq!(
1295 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1296 [
1297 ("fn a() { ".to_string(), None),
1298 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1299 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1300 ("\n".to_string(), None),
1301 ]
1302 );
1303 assert_eq!(
1304 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1305 [
1306 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1307 ("\n".to_string(), None),
1308 ]
1309 );
1310 });
1311
1312 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1313 // changes since the last save.
1314 buffer.update(cx, |buffer, cx| {
1315 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1316 buffer.edit(
1317 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1318 None,
1319 cx,
1320 );
1321 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1322 });
1323 let change_notification_2 = fake_server
1324 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1325 .await;
1326 assert!(
1327 change_notification_2.text_document.version > change_notification_1.text_document.version
1328 );
1329
1330 // Handle out-of-order diagnostics
1331 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1332 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1333 version: Some(change_notification_2.text_document.version),
1334 diagnostics: vec![
1335 lsp::Diagnostic {
1336 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1337 severity: Some(DiagnosticSeverity::ERROR),
1338 message: "undefined variable 'BB'".to_string(),
1339 source: Some("disk".to_string()),
1340 ..Default::default()
1341 },
1342 lsp::Diagnostic {
1343 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1344 severity: Some(DiagnosticSeverity::WARNING),
1345 message: "undefined variable 'A'".to_string(),
1346 source: Some("disk".to_string()),
1347 ..Default::default()
1348 },
1349 ],
1350 });
1351
1352 buffer.next_notification(cx).await;
1353 buffer.read_with(cx, |buffer, _| {
1354 assert_eq!(
1355 buffer
1356 .snapshot()
1357 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1358 .collect::<Vec<_>>(),
1359 &[
1360 DiagnosticEntry {
1361 range: Point::new(2, 21)..Point::new(2, 22),
1362 diagnostic: Diagnostic {
1363 source: Some("disk".into()),
1364 severity: DiagnosticSeverity::WARNING,
1365 message: "undefined variable 'A'".to_string(),
1366 is_disk_based: true,
1367 group_id: 6,
1368 is_primary: true,
1369 ..Default::default()
1370 }
1371 },
1372 DiagnosticEntry {
1373 range: Point::new(3, 9)..Point::new(3, 14),
1374 diagnostic: Diagnostic {
1375 source: Some("disk".into()),
1376 severity: DiagnosticSeverity::ERROR,
1377 message: "undefined variable 'BB'".to_string(),
1378 is_disk_based: true,
1379 group_id: 5,
1380 is_primary: true,
1381 ..Default::default()
1382 },
1383 }
1384 ]
1385 );
1386 });
1387}
1388
1389#[gpui::test]
1390async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1391 cx.foreground().forbid_parking();
1392
1393 let text = concat!(
1394 "let one = ;\n", //
1395 "let two = \n",
1396 "let three = 3;\n",
1397 );
1398
1399 let fs = FakeFs::new(cx.background());
1400 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1401
1402 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1403 let buffer = project
1404 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1405 .await
1406 .unwrap();
1407
1408 project.update(cx, |project, cx| {
1409 project
1410 .update_buffer_diagnostics(
1411 &buffer,
1412 LanguageServerId(0),
1413 None,
1414 vec![
1415 DiagnosticEntry {
1416 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1417 diagnostic: Diagnostic {
1418 severity: DiagnosticSeverity::ERROR,
1419 message: "syntax error 1".to_string(),
1420 ..Default::default()
1421 },
1422 },
1423 DiagnosticEntry {
1424 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1425 diagnostic: Diagnostic {
1426 severity: DiagnosticSeverity::ERROR,
1427 message: "syntax error 2".to_string(),
1428 ..Default::default()
1429 },
1430 },
1431 ],
1432 cx,
1433 )
1434 .unwrap();
1435 });
1436
1437 // An empty range is extended forward to include the following character.
1438 // At the end of a line, an empty range is extended backward to include
1439 // the preceding character.
1440 buffer.read_with(cx, |buffer, _| {
1441 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1442 assert_eq!(
1443 chunks
1444 .iter()
1445 .map(|(s, d)| (s.as_str(), *d))
1446 .collect::<Vec<_>>(),
1447 &[
1448 ("let one = ", None),
1449 (";", Some(DiagnosticSeverity::ERROR)),
1450 ("\nlet two =", None),
1451 (" ", Some(DiagnosticSeverity::ERROR)),
1452 ("\nlet three = 3;\n", None)
1453 ]
1454 );
1455 });
1456}
1457
1458#[gpui::test]
1459async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1462 cx.foreground().forbid_parking();
1463
1464 let fs = FakeFs::new(cx.background());
1465 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1466 .await;
1467
1468 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1469
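    // Report one error from each of two language servers for the same path; both
    // should be counted in the project's diagnostic summary.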
1470 project.update(cx, |project, cx| {
1471 project
1472 .update_diagnostic_entries(
1473 LanguageServerId(0),
1474 Path::new("/dir/a.rs").to_owned(),
1475 None,
1476 vec![DiagnosticEntry {
1477 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1478 diagnostic: Diagnostic {
1479 severity: DiagnosticSeverity::ERROR,
1480 is_primary: true,
1481 message: "syntax error a1".to_string(),
1482 ..Default::default()
1483 },
1484 }],
1485 cx,
1486 )
1487 .unwrap();
1488 project
1489 .update_diagnostic_entries(
1490 LanguageServerId(1),
1491 Path::new("/dir/a.rs").to_owned(),
1492 None,
1493 vec![DiagnosticEntry {
1494 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1495 diagnostic: Diagnostic {
1496 severity: DiagnosticSeverity::ERROR,
1497 is_primary: true,
1498 message: "syntax error b1".to_string(),
1499 ..Default::default()
1500 },
1501 }],
1502 cx,
1503 )
1504 .unwrap();
1505
1506 assert_eq!(
1507 project.diagnostic_summary(cx),
1508 DiagnosticSummary {
1509 error_count: 2,
1510 warning_count: 0,
1511 }
1512 );
1513 });
1514}
1515
1516#[gpui::test]
1517async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1518 cx.foreground().forbid_parking();
1519
1520 let mut language = Language::new(
1521 LanguageConfig {
1522 name: "Rust".into(),
1523 path_suffixes: vec!["rs".to_string()],
1524 ..Default::default()
1525 },
1526 Some(tree_sitter_rust::language()),
1527 );
1528 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1529
1530 let text = "
1531 fn a() {
1532 f1();
1533 }
1534 fn b() {
1535 f2();
1536 }
1537 fn c() {
1538 f3();
1539 }
1540 "
1541 .unindent();
1542
1543 let fs = FakeFs::new(cx.background());
1544 fs.insert_tree(
1545 "/dir",
1546 json!({
1547 "a.rs": text.clone(),
1548 }),
1549 )
1550 .await;
1551
1552 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1553 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1554 let buffer = project
1555 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1556 .await
1557 .unwrap();
1558
1559 let mut fake_server = fake_servers.next().await.unwrap();
1560 let lsp_document_version = fake_server
1561 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1562 .await
1563 .text_document
1564 .version;
1565
1566 // Simulate editing the buffer after the language server computes some edits.
1567 buffer.update(cx, |buffer, cx| {
1568 buffer.edit(
1569 [(
1570 Point::new(0, 0)..Point::new(0, 0),
1571 "// above first function\n",
1572 )],
1573 None,
1574 cx,
1575 );
1576 buffer.edit(
1577 [(
1578 Point::new(2, 0)..Point::new(2, 0),
1579 " // inside first function\n",
1580 )],
1581 None,
1582 cx,
1583 );
1584 buffer.edit(
1585 [(
1586 Point::new(6, 4)..Point::new(6, 4),
1587 "// inside second function ",
1588 )],
1589 None,
1590 cx,
1591 );
1592
1593 assert_eq!(
1594 buffer.text(),
1595 "
1596 // above first function
1597 fn a() {
1598 // inside first function
1599 f1();
1600 }
1601 fn b() {
1602 // inside second function f2();
1603 }
1604 fn c() {
1605 f3();
1606 }
1607 "
1608 .unindent()
1609 );
1610 });
1611
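    // Interpret edits that the server computed against the old document version;
    // they should be adjusted to apply cleanly to the buffer's current contents.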
1612 let edits = project
1613 .update(cx, |project, cx| {
1614 project.edits_from_lsp(
1615 &buffer,
1616 vec![
1617 // replace body of first function
1618 lsp::TextEdit {
1619 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1620 new_text: "
1621 fn a() {
1622 f10();
1623 }
1624 "
1625 .unindent(),
1626 },
1627 // edit inside second function
1628 lsp::TextEdit {
1629 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1630 new_text: "00".into(),
1631 },
1632 // edit inside third function via two distinct edits
1633 lsp::TextEdit {
1634 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1635 new_text: "4000".into(),
1636 },
1637 lsp::TextEdit {
1638 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1639 new_text: "".into(),
1640 },
1641 ],
1642 LanguageServerId(0),
1643 Some(lsp_document_version),
1644 cx,
1645 )
1646 })
1647 .await
1648 .unwrap();
1649
1650 buffer.update(cx, |buffer, cx| {
1651 for (range, new_text) in edits {
1652 buffer.edit([(range, new_text)], None, cx);
1653 }
1654 assert_eq!(
1655 buffer.text(),
1656 "
1657 // above first function
1658 fn a() {
1659 // inside first function
1660 f10();
1661 }
1662 fn b() {
1663 // inside second function f200();
1664 }
1665 fn c() {
1666 f4000();
1667 }
1668 "
1669 .unindent()
1670 );
1671 });
1672}
1673
1674#[gpui::test]
1675async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1676 cx.foreground().forbid_parking();
1677
1678 let text = "
1679 use a::b;
1680 use a::c;
1681
1682 fn f() {
1683 b();
1684 c();
1685 }
1686 "
1687 .unindent();
1688
1689 let fs = FakeFs::new(cx.background());
1690 fs.insert_tree(
1691 "/dir",
1692 json!({
1693 "a.rs": text.clone(),
1694 }),
1695 )
1696 .await;
1697
1698 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1699 let buffer = project
1700 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1701 .await
1702 .unwrap();
1703
1704 // Simulate the language server sending us a small edit in the form of a very large diff.
1705 // Rust-analyzer does this when performing a merge-imports code action.
1706 let edits = project
1707 .update(cx, |project, cx| {
1708 project.edits_from_lsp(
1709 &buffer,
1710 [
1711 // Replace the first use statement without editing the semicolon.
1712 lsp::TextEdit {
1713 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1714 new_text: "a::{b, c}".into(),
1715 },
1716 // Reinsert the remainder of the file between the semicolon and the final
1717 // newline of the file.
1718 lsp::TextEdit {
1719 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1720 new_text: "\n\n".into(),
1721 },
1722 lsp::TextEdit {
1723 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1724 new_text: "
1725 fn f() {
1726 b();
1727 c();
1728 }"
1729 .unindent(),
1730 },
1731 // Delete everything after the first newline of the file.
1732 lsp::TextEdit {
1733 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1734 new_text: "".into(),
1735 },
1736 ],
1737 LanguageServerId(0),
1738 None,
1739 cx,
1740 )
1741 })
1742 .await
1743 .unwrap();
1744
1745 buffer.update(cx, |buffer, cx| {
1746 let edits = edits
1747 .into_iter()
1748 .map(|(range, text)| {
1749 (
1750 range.start.to_point(buffer)..range.end.to_point(buffer),
1751 text,
1752 )
1753 })
1754 .collect::<Vec<_>>();
1755
1756 assert_eq!(
1757 edits,
1758 [
1759 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1760 (Point::new(1, 0)..Point::new(2, 0), "".into())
1761 ]
1762 );
1763
1764 for (range, new_text) in edits {
1765 buffer.edit([(range, new_text)], None, cx);
1766 }
1767 assert_eq!(
1768 buffer.text(),
1769 "
1770 use a::{b, c};
1771
1772 fn f() {
1773 b();
1774 c();
1775 }
1776 "
1777 .unindent()
1778 );
1779 });
1780}
1781
1782#[gpui::test]
1783async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1784 cx.foreground().forbid_parking();
1785
1786 let text = "
1787 use a::b;
1788 use a::c;
1789
1790 fn f() {
1791 b();
1792 c();
1793 }
1794 "
1795 .unindent();
1796
1797 let fs = FakeFs::new(cx.background());
1798 fs.insert_tree(
1799 "/dir",
1800 json!({
1801 "a.rs": text.clone(),
1802 }),
1803 )
1804 .await;
1805
1806 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1807 let buffer = project
1808 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1809 .await
1810 .unwrap();
1811
1812 // Simulate the language server sending us edits in a non-ordered fashion,
1813 // with ranges sometimes being inverted or pointing to invalid locations.
1814 let edits = project
1815 .update(cx, |project, cx| {
1816 project.edits_from_lsp(
1817 &buffer,
1818 [
1819 lsp::TextEdit {
1820 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1821 new_text: "\n\n".into(),
1822 },
1823 lsp::TextEdit {
1824 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1825 new_text: "a::{b, c}".into(),
1826 },
1827 lsp::TextEdit {
1828 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1829 new_text: "".into(),
1830 },
1831 lsp::TextEdit {
1832 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1833 new_text: "
1834 fn f() {
1835 b();
1836 c();
1837 }"
1838 .unindent(),
1839 },
1840 ],
1841 LanguageServerId(0),
1842 None,
1843 cx,
1844 )
1845 })
1846 .await
1847 .unwrap();
1848
1849 buffer.update(cx, |buffer, cx| {
1850 let edits = edits
1851 .into_iter()
1852 .map(|(range, text)| {
1853 (
1854 range.start.to_point(buffer)..range.end.to_point(buffer),
1855 text,
1856 )
1857 })
1858 .collect::<Vec<_>>();
1859
1860 assert_eq!(
1861 edits,
1862 [
1863 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1864 (Point::new(1, 0)..Point::new(2, 0), "".into())
1865 ]
1866 );
1867
1868 for (range, new_text) in edits {
1869 buffer.edit([(range, new_text)], None, cx);
1870 }
1871 assert_eq!(
1872 buffer.text(),
1873 "
1874 use a::{b, c};
1875
1876 fn f() {
1877 b();
1878 c();
1879 }
1880 "
1881 .unindent()
1882 );
1883 });
1884}
1885
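// Collects the chunks of `buffer` within `range`, merging adjacent chunks that share
// the same diagnostic severity, so tests can assert on (text, severity) runs.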
1886fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1887 buffer: &Buffer,
1888 range: Range<T>,
1889) -> Vec<(String, Option<DiagnosticSeverity>)> {
1890 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1891 for chunk in buffer.snapshot().chunks(range, true) {
1892 if chunks.last().map_or(false, |prev_chunk| {
1893 prev_chunk.1 == chunk.diagnostic_severity
1894 }) {
1895 chunks.last_mut().unwrap().0.push_str(chunk.text);
1896 } else {
1897 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1898 }
1899 }
1900 chunks
1901}
1902
1903#[gpui::test(iterations = 10)]
1904async fn test_definition(cx: &mut gpui::TestAppContext) {
1905 let mut language = Language::new(
1906 LanguageConfig {
1907 name: "Rust".into(),
1908 path_suffixes: vec!["rs".to_string()],
1909 ..Default::default()
1910 },
1911 Some(tree_sitter_rust::language()),
1912 );
1913 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1914
1915 let fs = FakeFs::new(cx.background());
1916 fs.insert_tree(
1917 "/dir",
1918 json!({
1919 "a.rs": "const fn a() { A }",
1920 "b.rs": "const y: i32 = crate::a()",
1921 }),
1922 )
1923 .await;
1924
1925 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1926 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1927
1928 let buffer = project
1929 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1930 .await
1931 .unwrap();
1932
1933 let fake_server = fake_servers.next().await.unwrap();
1934 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1935 let params = params.text_document_position_params;
1936 assert_eq!(
1937 params.text_document.uri.to_file_path().unwrap(),
1938 Path::new("/dir/b.rs"),
1939 );
1940 assert_eq!(params.position, lsp::Position::new(0, 22));
1941
1942 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1943 lsp::Location::new(
1944 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1945 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1946 ),
1947 )))
1948 });
1949
1950 let mut definitions = project
1951 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1952 .await
1953 .unwrap();
1954
    // Assert that no new language server was started
1956 cx.foreground().run_until_parked();
1957 assert!(fake_servers.try_next().is_err());
1958
1959 assert_eq!(definitions.len(), 1);
1960 let definition = definitions.pop().unwrap();
1961 cx.update(|cx| {
1962 let target_buffer = definition.target.buffer.read(cx);
1963 assert_eq!(
1964 target_buffer
1965 .file()
1966 .unwrap()
1967 .as_local()
1968 .unwrap()
1969 .abs_path(cx),
1970 Path::new("/dir/a.rs"),
1971 );
1972 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1973 assert_eq!(
1974 list_worktrees(&project, cx),
1975 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1976 );
1977
1978 drop(definition);
1979 });
1980 cx.read(|cx| {
1981 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1982 });
1983
1984 fn list_worktrees<'a>(
1985 project: &'a ModelHandle<Project>,
1986 cx: &'a AppContext,
1987 ) -> Vec<(&'a Path, bool)> {
1988 project
1989 .read(cx)
1990 .worktrees(cx)
1991 .map(|worktree| {
1992 let worktree = worktree.read(cx);
1993 (
1994 worktree.as_local().unwrap().abs_path().as_ref(),
1995 worktree.is_visible(),
1996 )
1997 })
1998 .collect::<Vec<_>>()
1999 }
2000}
2001
2002#[gpui::test]
2003async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2004 let mut language = Language::new(
2005 LanguageConfig {
2006 name: "TypeScript".into(),
2007 path_suffixes: vec!["ts".to_string()],
2008 ..Default::default()
2009 },
2010 Some(tree_sitter_typescript::language_typescript()),
2011 );
2012 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2013
2014 let fs = FakeFs::new(cx.background());
2015 fs.insert_tree(
2016 "/dir",
2017 json!({
2018 "a.ts": "",
2019 }),
2020 )
2021 .await;
2022
2023 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2024 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2025 let buffer = project
2026 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2027 .await
2028 .unwrap();
2029
2030 let fake_server = fake_language_servers.next().await.unwrap();
2031
2032 let text = "let a = b.fqn";
2033 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2034 let completions = project.update(cx, |project, cx| {
2035 project.completions(&buffer, text.len(), cx)
2036 });
2037
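    // The completion item has no text edit, so the range to replace is inferred
    // from the word adjacent to the cursor ("fqn").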
2038 fake_server
2039 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2040 Ok(Some(lsp::CompletionResponse::Array(vec![
2041 lsp::CompletionItem {
2042 label: "fullyQualifiedName?".into(),
2043 insert_text: Some("fullyQualifiedName".into()),
2044 ..Default::default()
2045 },
2046 ])))
2047 })
2048 .next()
2049 .await;
2050 let completions = completions.await.unwrap();
2051 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2052 assert_eq!(completions.len(), 1);
2053 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2054 assert_eq!(
2055 completions[0].old_range.to_offset(&snapshot),
2056 text.len() - 3..text.len()
2057 );
2058
2059 let text = "let a = \"atoms/cmp\"";
2060 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2061 let completions = project.update(cx, |project, cx| {
2062 project.completions(&buffer, text.len() - 1, cx)
2063 });
2064
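    // With the cursor inside a string literal, the inferred range covers the
    // partial word before the cursor ("cmp"), not the surrounding quotes.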
2065 fake_server
2066 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2067 Ok(Some(lsp::CompletionResponse::Array(vec![
2068 lsp::CompletionItem {
2069 label: "component".into(),
2070 ..Default::default()
2071 },
2072 ])))
2073 })
2074 .next()
2075 .await;
2076 let completions = completions.await.unwrap();
2077 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2078 assert_eq!(completions.len(), 1);
2079 assert_eq!(completions[0].new_text, "component");
2080 assert_eq!(
2081 completions[0].old_range.to_offset(&snapshot),
2082 text.len() - 4..text.len() - 1
2083 );
2084}
2085
2086#[gpui::test]
2087async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2088 let mut language = Language::new(
2089 LanguageConfig {
2090 name: "TypeScript".into(),
2091 path_suffixes: vec!["ts".to_string()],
2092 ..Default::default()
2093 },
2094 Some(tree_sitter_typescript::language_typescript()),
2095 );
2096 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2097
2098 let fs = FakeFs::new(cx.background());
2099 fs.insert_tree(
2100 "/dir",
2101 json!({
2102 "a.ts": "",
2103 }),
2104 )
2105 .await;
2106
2107 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2108 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2109 let buffer = project
2110 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2111 .await
2112 .unwrap();
2113
2114 let fake_server = fake_language_servers.next().await.unwrap();
2115
2116 let text = "let a = b.fqn";
2117 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2118 let completions = project.update(cx, |project, cx| {
2119 project.completions(&buffer, text.len(), cx)
2120 });
2121
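    // The completion's insert text contains bare `\r` and `\r\n` line endings,
    // which should be normalized to `\n` when the completion is applied.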
2122 fake_server
2123 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2124 Ok(Some(lsp::CompletionResponse::Array(vec![
2125 lsp::CompletionItem {
2126 label: "fullyQualifiedName?".into(),
2127 insert_text: Some("fully\rQualified\r\nName".into()),
2128 ..Default::default()
2129 },
2130 ])))
2131 })
2132 .next()
2133 .await;
2134 let completions = completions.await.unwrap();
2135 assert_eq!(completions.len(), 1);
2136 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2137}
2138
2139#[gpui::test(iterations = 10)]
2140async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2141 let mut language = Language::new(
2142 LanguageConfig {
2143 name: "TypeScript".into(),
2144 path_suffixes: vec!["ts".to_string()],
2145 ..Default::default()
2146 },
2147 None,
2148 );
2149 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2150
2151 let fs = FakeFs::new(cx.background());
2152 fs.insert_tree(
2153 "/dir",
2154 json!({
2155 "a.ts": "a",
2156 }),
2157 )
2158 .await;
2159
2160 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2161 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2162 let buffer = project
2163 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2164 .await
2165 .unwrap();
2166
2167 let fake_server = fake_language_servers.next().await.unwrap();
2168
    // The language server returns code actions that contain commands, but no edits.
2170 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2171 fake_server
2172 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2173 Ok(Some(vec![
2174 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2175 title: "The code action".into(),
2176 command: Some(lsp::Command {
2177 title: "The command".into(),
2178 command: "_the/command".into(),
2179 arguments: Some(vec![json!("the-argument")]),
2180 }),
2181 ..Default::default()
2182 }),
2183 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2184 title: "two".into(),
2185 ..Default::default()
2186 }),
2187 ]))
2188 })
2189 .next()
2190 .await;
2191
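    // Apply the first code action. Because it carries a command rather than edits,
    // applying it executes that command via the language server.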
2192 let action = actions.await.unwrap()[0].clone();
2193 let apply = project.update(cx, |project, cx| {
2194 project.apply_code_action(buffer.clone(), action, true, cx)
2195 });
2196
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the action's command instead.
2199 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2200 |action, _| async move { Ok(action) },
2201 );
2202
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2205 fake_server
2206 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2207 let fake = fake_server.clone();
2208 move |params, _| {
2209 assert_eq!(params.command, "_the/command");
2210 let fake = fake.clone();
2211 async move {
2212 fake.server
2213 .request::<lsp::request::ApplyWorkspaceEdit>(
2214 lsp::ApplyWorkspaceEditParams {
2215 label: None,
2216 edit: lsp::WorkspaceEdit {
2217 changes: Some(
2218 [(
2219 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2220 vec![lsp::TextEdit {
2221 range: lsp::Range::new(
2222 lsp::Position::new(0, 0),
2223 lsp::Position::new(0, 0),
2224 ),
2225 new_text: "X".into(),
2226 }],
2227 )]
2228 .into_iter()
2229 .collect(),
2230 ),
2231 ..Default::default()
2232 },
2233 },
2234 )
2235 .await
2236 .unwrap();
2237 Ok(Some(json!(null)))
2238 }
2239 }
2240 })
2241 .next()
2242 .await;
2243
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2246 let transaction = apply.await.unwrap();
2247 assert!(transaction.0.contains_key(&buffer));
2248 buffer.update(cx, |buffer, cx| {
2249 assert_eq!(buffer.text(), "Xa");
2250 buffer.undo(cx);
2251 assert_eq!(buffer.text(), "a");
2252 });
2253}
2254
2255#[gpui::test(iterations = 10)]
2256async fn test_save_file(cx: &mut gpui::TestAppContext) {
2257 let fs = FakeFs::new(cx.background());
2258 fs.insert_tree(
2259 "/dir",
2260 json!({
2261 "file1": "the old contents",
2262 }),
2263 )
2264 .await;
2265
2266 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2267 let buffer = project
2268 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2269 .await
2270 .unwrap();
2271 buffer.update(cx, |buffer, cx| {
2272 assert_eq!(buffer.text(), "the old contents");
2273 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2274 });
2275
2276 project
2277 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2278 .await
2279 .unwrap();
2280
2281 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2282 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2283}
2284
2285#[gpui::test]
2286async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2287 let fs = FakeFs::new(cx.background());
2288 fs.insert_tree(
2289 "/dir",
2290 json!({
2291 "file1": "the old contents",
2292 }),
2293 )
2294 .await;
2295
2296 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2297 let buffer = project
2298 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2299 .await
2300 .unwrap();
2301 buffer.update(cx, |buffer, cx| {
2302 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2303 });
2304
2305 project
2306 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2307 .await
2308 .unwrap();
2309
2310 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2311 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2312}
2313
2314#[gpui::test]
2315async fn test_save_as(cx: &mut gpui::TestAppContext) {
2316 let fs = FakeFs::new(cx.background());
2317 fs.insert_tree("/dir", json!({})).await;
2318
2319 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2320
2321 let languages = project.read_with(cx, |project, _| project.languages().clone());
2322 languages.register(
2323 "/some/path",
2324 LanguageConfig {
2325 name: "Rust".into(),
2326 path_suffixes: vec!["rs".into()],
2327 ..Default::default()
2328 },
2329 tree_sitter_rust::language(),
2330 vec![],
2331 |_| Default::default(),
2332 );
2333
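    // Create an untitled buffer and edit it. With no associated file, the buffer
    // uses the Plain Text language.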
2334 let buffer = project.update(cx, |project, cx| {
2335 project.create_buffer("", None, cx).unwrap()
2336 });
2337 buffer.update(cx, |buffer, cx| {
2338 buffer.edit([(0..0, "abc")], None, cx);
2339 assert!(buffer.is_dirty());
2340 assert!(!buffer.has_conflict());
2341 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2342 });
2343 project
2344 .update(cx, |project, cx| {
2345 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2346 })
2347 .await
2348 .unwrap();
2349 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2350
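    // After saving to a path ending in `.rs`, the buffer is clean and its language
    // is detected as Rust from the new file name.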
2351 cx.foreground().run_until_parked();
2352 buffer.read_with(cx, |buffer, cx| {
2353 assert_eq!(
2354 buffer.file().unwrap().full_path(cx),
2355 Path::new("dir/file1.rs")
2356 );
2357 assert!(!buffer.is_dirty());
2358 assert!(!buffer.has_conflict());
2359 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2360 });
2361
2362 let opened_buffer = project
2363 .update(cx, |project, cx| {
2364 project.open_local_buffer("/dir/file1.rs", cx)
2365 })
2366 .await
2367 .unwrap();
2368 assert_eq!(opened_buffer, buffer);
2369}
2370
2371#[gpui::test(retries = 5)]
2372async fn test_rescan_and_remote_updates(
2373 deterministic: Arc<Deterministic>,
2374 cx: &mut gpui::TestAppContext,
2375) {
2376 let dir = temp_tree(json!({
2377 "a": {
2378 "file1": "",
2379 "file2": "",
2380 "file3": "",
2381 },
2382 "b": {
2383 "c": {
2384 "file4": "",
2385 "file5": "",
2386 }
2387 }
2388 }));
2389
2390 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2391 let rpc = project.read_with(cx, |p, _| p.client.clone());
2392
2393 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2394 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2395 async move { buffer.await.unwrap() }
2396 };
2397 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2398 project.read_with(cx, |project, cx| {
2399 let tree = project.worktrees(cx).next().unwrap();
2400 tree.read(cx)
2401 .entry_for_path(path)
2402 .unwrap_or_else(|| panic!("no entry for path {}", path))
2403 .id
2404 })
2405 };
2406
2407 let buffer2 = buffer_for_path("a/file2", cx).await;
2408 let buffer3 = buffer_for_path("a/file3", cx).await;
2409 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2410 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2411
2412 let file2_id = id_for_path("a/file2", cx);
2413 let file3_id = id_for_path("a/file3", cx);
2414 let file4_id = id_for_path("b/c/file4", cx);
2415
2416 // Create a remote copy of this worktree.
2417 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2418 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2419 let remote = cx.update(|cx| {
2420 Worktree::remote(
2421 1,
2422 1,
2423 proto::WorktreeMetadata {
2424 id: initial_snapshot.id().to_proto(),
2425 root_name: initial_snapshot.root_name().into(),
2426 abs_path: initial_snapshot
2427 .abs_path()
2428 .as_os_str()
2429 .to_string_lossy()
2430 .into(),
2431 visible: true,
2432 },
2433 rpc.clone(),
2434 cx,
2435 )
2436 });
2437 remote.update(cx, |remote, _| {
2438 let update = initial_snapshot.build_initial_update(1);
2439 remote.as_remote_mut().unwrap().update_from_remote(update);
2440 });
2441 deterministic.run_until_parked();
2442
2443 cx.read(|cx| {
2444 assert!(!buffer2.read(cx).is_dirty());
2445 assert!(!buffer3.read(cx).is_dirty());
2446 assert!(!buffer4.read(cx).is_dirty());
2447 assert!(!buffer5.read(cx).is_dirty());
2448 });
2449
2450 // Rename and delete files and directories.
2451 tree.flush_fs_events(cx).await;
2452 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2453 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2454 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2455 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2456 tree.flush_fs_events(cx).await;
2457
2458 let expected_paths = vec![
2459 "a",
2460 "a/file1",
2461 "a/file2.new",
2462 "b",
2463 "d",
2464 "d/file3",
2465 "d/file4",
2466 ];
2467
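    // The worktree reflects the renames and deletions, entry ids are preserved,
    // and open buffers track their files' new paths. The buffer whose file was
    // deleted is marked as deleted.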
2468 cx.read(|app| {
2469 assert_eq!(
2470 tree.read(app)
2471 .paths()
2472 .map(|p| p.to_str().unwrap())
2473 .collect::<Vec<_>>(),
2474 expected_paths
2475 );
2476
2477 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2478 assert_eq!(id_for_path("d/file3", cx), file3_id);
2479 assert_eq!(id_for_path("d/file4", cx), file4_id);
2480
2481 assert_eq!(
2482 buffer2.read(app).file().unwrap().path().as_ref(),
2483 Path::new("a/file2.new")
2484 );
2485 assert_eq!(
2486 buffer3.read(app).file().unwrap().path().as_ref(),
2487 Path::new("d/file3")
2488 );
2489 assert_eq!(
2490 buffer4.read(app).file().unwrap().path().as_ref(),
2491 Path::new("d/file4")
2492 );
2493 assert_eq!(
2494 buffer5.read(app).file().unwrap().path().as_ref(),
2495 Path::new("b/c/file5")
2496 );
2497
2498 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2499 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2500 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2501 assert!(buffer5.read(app).file().unwrap().is_deleted());
2502 });
2503
2504 // Update the remote worktree. Check that it becomes consistent with the
2505 // local worktree.
2506 remote.update(cx, |remote, cx| {
2507 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2508 &initial_snapshot,
2509 1,
2510 1,
2511 true,
2512 );
2513 remote.as_remote_mut().unwrap().update_from_remote(update);
2514 });
2515 deterministic.run_until_parked();
2516 remote.read_with(cx, |remote, _| {
2517 assert_eq!(
2518 remote
2519 .paths()
2520 .map(|p| p.to_str().unwrap())
2521 .collect::<Vec<_>>(),
2522 expected_paths
2523 );
2524 });
2525}
2526
2527#[gpui::test(iterations = 10)]
2528async fn test_buffer_identity_across_renames(
2529 deterministic: Arc<Deterministic>,
2530 cx: &mut gpui::TestAppContext,
2531) {
2532 let fs = FakeFs::new(cx.background());
2533 fs.insert_tree(
2534 "/dir",
2535 json!({
2536 "a": {
2537 "file1": "",
2538 }
2539 }),
2540 )
2541 .await;
2542
2543 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2544 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2545 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2546
2547 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2548 project.read_with(cx, |project, cx| {
2549 let tree = project.worktrees(cx).next().unwrap();
2550 tree.read(cx)
2551 .entry_for_path(path)
2552 .unwrap_or_else(|| panic!("no entry for path {}", path))
2553 .id
2554 })
2555 };
2556
2557 let dir_id = id_for_path("a", cx);
2558 let file_id = id_for_path("a/file1", cx);
2559 let buffer = project
2560 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2561 .await
2562 .unwrap();
2563 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2564
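    // Rename the parent directory. The entry ids and the open buffer should
    // survive the rename.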
2565 project
2566 .update(cx, |project, cx| {
2567 project.rename_entry(dir_id, Path::new("b"), cx)
2568 })
2569 .unwrap()
2570 .await
2571 .unwrap();
2572 deterministic.run_until_parked();
2573 assert_eq!(id_for_path("b", cx), dir_id);
2574 assert_eq!(id_for_path("b/file1", cx), file_id);
2575 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2576}
2577
2578#[gpui::test]
2579async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2580 let fs = FakeFs::new(cx.background());
2581 fs.insert_tree(
2582 "/dir",
2583 json!({
2584 "a.txt": "a-contents",
2585 "b.txt": "b-contents",
2586 }),
2587 )
2588 .await;
2589
2590 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2591
2592 // Spawn multiple tasks to open paths, repeating some paths.
2593 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2594 (
2595 p.open_local_buffer("/dir/a.txt", cx),
2596 p.open_local_buffer("/dir/b.txt", cx),
2597 p.open_local_buffer("/dir/a.txt", cx),
2598 )
2599 });
2600
2601 let buffer_a_1 = buffer_a_1.await.unwrap();
2602 let buffer_a_2 = buffer_a_2.await.unwrap();
2603 let buffer_b = buffer_b.await.unwrap();
2604 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2605 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2606
2607 // There is only one buffer per path.
2608 let buffer_a_id = buffer_a_1.id();
2609 assert_eq!(buffer_a_2.id(), buffer_a_id);
2610
2611 // Open the same path again while it is still open.
2612 drop(buffer_a_1);
2613 let buffer_a_3 = project
2614 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2615 .await
2616 .unwrap();
2617
2618 // There's still only one buffer per path.
2619 assert_eq!(buffer_a_3.id(), buffer_a_id);
2620}
2621
2622#[gpui::test]
2623async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2624 let fs = FakeFs::new(cx.background());
2625 fs.insert_tree(
2626 "/dir",
2627 json!({
2628 "file1": "abc",
2629 "file2": "def",
2630 "file3": "ghi",
2631 }),
2632 )
2633 .await;
2634
2635 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2636
2637 let buffer1 = project
2638 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2639 .await
2640 .unwrap();
2641 let events = Rc::new(RefCell::new(Vec::new()));
2642
2643 // initially, the buffer isn't dirty.
2644 buffer1.update(cx, |buffer, cx| {
2645 cx.subscribe(&buffer1, {
2646 let events = events.clone();
2647 move |_, _, event, _| match event {
2648 BufferEvent::Operation(_) => {}
2649 _ => events.borrow_mut().push(event.clone()),
2650 }
2651 })
2652 .detach();
2653
2654 assert!(!buffer.is_dirty());
2655 assert!(events.borrow().is_empty());
2656
2657 buffer.edit([(1..2, "")], None, cx);
2658 });
2659
    // after the first edit, the buffer is dirty, and emits both an edited and a
    // dirty-changed event.
2661 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2663 assert!(buffer.is_dirty());
2664 assert_eq!(
2665 *events.borrow(),
2666 &[language::Event::Edited, language::Event::DirtyChanged]
2667 );
2668 events.borrow_mut().clear();
2669 buffer.did_save(
2670 buffer.version(),
2671 buffer.as_rope().fingerprint(),
2672 buffer.file().unwrap().mtime(),
2673 cx,
2674 );
2675 });
2676
2677 // after saving, the buffer is not dirty, and emits a saved event.
2678 buffer1.update(cx, |buffer, cx| {
2679 assert!(!buffer.is_dirty());
2680 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2681 events.borrow_mut().clear();
2682
2683 buffer.edit([(1..1, "B")], None, cx);
2684 buffer.edit([(2..2, "D")], None, cx);
2685 });
2686
2687 // after editing again, the buffer is dirty, and emits another dirty event.
2688 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2690 assert!(buffer.is_dirty());
2691 assert_eq!(
2692 *events.borrow(),
2693 &[
2694 language::Event::Edited,
2695 language::Event::DirtyChanged,
2696 language::Event::Edited,
2697 ],
2698 );
2699 events.borrow_mut().clear();
2700
2701 // After restoring the buffer to its previously-saved state,
2702 // the buffer is not considered dirty anymore.
2703 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
2705 assert!(!buffer.is_dirty());
2706 });
2707
2708 assert_eq!(
2709 *events.borrow(),
2710 &[language::Event::Edited, language::Event::DirtyChanged]
2711 );
2712
2713 // When a file is deleted, the buffer is considered dirty.
2714 let events = Rc::new(RefCell::new(Vec::new()));
2715 let buffer2 = project
2716 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2717 .await
2718 .unwrap();
2719 buffer2.update(cx, |_, cx| {
2720 cx.subscribe(&buffer2, {
2721 let events = events.clone();
2722 move |_, _, event, _| events.borrow_mut().push(event.clone())
2723 })
2724 .detach();
2725 });
2726
2727 fs.remove_file("/dir/file2".as_ref(), Default::default())
2728 .await
2729 .unwrap();
2730 cx.foreground().run_until_parked();
2731 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2732 assert_eq!(
2733 *events.borrow(),
2734 &[
2735 language::Event::DirtyChanged,
2736 language::Event::FileHandleChanged
2737 ]
2738 );
2739
    // When a file is already dirty when deleted, we don't emit an additional
    // dirty-changed event.
2741 let events = Rc::new(RefCell::new(Vec::new()));
2742 let buffer3 = project
2743 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2744 .await
2745 .unwrap();
2746 buffer3.update(cx, |_, cx| {
2747 cx.subscribe(&buffer3, {
2748 let events = events.clone();
2749 move |_, _, event, _| events.borrow_mut().push(event.clone())
2750 })
2751 .detach();
2752 });
2753
2754 buffer3.update(cx, |buffer, cx| {
2755 buffer.edit([(0..0, "x")], None, cx);
2756 });
2757 events.borrow_mut().clear();
2758 fs.remove_file("/dir/file3".as_ref(), Default::default())
2759 .await
2760 .unwrap();
2761 cx.foreground().run_until_parked();
2762 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2763 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2764}
2765
2766#[gpui::test]
2767async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2768 let initial_contents = "aaa\nbbbbb\nc\n";
2769 let fs = FakeFs::new(cx.background());
2770 fs.insert_tree(
2771 "/dir",
2772 json!({
2773 "the-file": initial_contents,
2774 }),
2775 )
2776 .await;
2777 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2778 let buffer = project
2779 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2780 .await
2781 .unwrap();
2782
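    // Create anchors at column 1 of the first three rows, so we can check how
    // they are relocated when the buffer reloads from disk.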
2783 let anchors = (0..3)
2784 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2785 .collect::<Vec<_>>();
2786
2787 // Change the file on disk, adding two new lines of text, and removing
2788 // one line.
2789 buffer.read_with(cx, |buffer, _| {
2790 assert!(!buffer.is_dirty());
2791 assert!(!buffer.has_conflict());
2792 });
2793 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2794 fs.save(
2795 "/dir/the-file".as_ref(),
2796 &new_contents.into(),
2797 LineEnding::Unix,
2798 )
2799 .await
2800 .unwrap();
2801
2802 // Because the buffer was not modified, it is reloaded from disk. Its
2803 // contents are edited according to the diff between the old and new
2804 // file contents.
2805 cx.foreground().run_until_parked();
2806 buffer.update(cx, |buffer, _| {
2807 assert_eq!(buffer.text(), new_contents);
2808 assert!(!buffer.is_dirty());
2809 assert!(!buffer.has_conflict());
2810
2811 let anchor_positions = anchors
2812 .iter()
2813 .map(|anchor| anchor.to_point(&*buffer))
2814 .collect::<Vec<_>>();
2815 assert_eq!(
2816 anchor_positions,
2817 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2818 );
2819 });
2820
    // Modify the buffer, so that it is dirty when the file changes on disk again.
2822 buffer.update(cx, |buffer, cx| {
2823 buffer.edit([(0..0, " ")], None, cx);
2824 assert!(buffer.is_dirty());
2825 assert!(!buffer.has_conflict());
2826 });
2827
2828 // Change the file on disk again, adding blank lines to the beginning.
2829 fs.save(
2830 "/dir/the-file".as_ref(),
2831 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2832 LineEnding::Unix,
2833 )
2834 .await
2835 .unwrap();
2836
2837 // Because the buffer is modified, it doesn't reload from disk, but is
2838 // marked as having a conflict.
2839 cx.foreground().run_until_parked();
2840 buffer.read_with(cx, |buffer, _| {
2841 assert!(buffer.has_conflict());
2842 });
2843}
2844
2845#[gpui::test]
2846async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2847 let fs = FakeFs::new(cx.background());
2848 fs.insert_tree(
2849 "/dir",
2850 json!({
2851 "file1": "a\nb\nc\n",
2852 "file2": "one\r\ntwo\r\nthree\r\n",
2853 }),
2854 )
2855 .await;
2856
2857 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2858 let buffer1 = project
2859 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2860 .await
2861 .unwrap();
2862 let buffer2 = project
2863 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2864 .await
2865 .unwrap();
2866
2867 buffer1.read_with(cx, |buffer, _| {
2868 assert_eq!(buffer.text(), "a\nb\nc\n");
2869 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2870 });
2871 buffer2.read_with(cx, |buffer, _| {
2872 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2873 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2874 });
2875
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2878 fs.save(
2879 "/dir/file1".as_ref(),
2880 &"aaa\nb\nc\n".into(),
2881 LineEnding::Windows,
2882 )
2883 .await
2884 .unwrap();
2885 cx.foreground().run_until_parked();
2886 buffer1.read_with(cx, |buffer, _| {
2887 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2888 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2889 });
2890
    // Save a file with Windows line endings. The file is written correctly.
2892 buffer2.update(cx, |buffer, cx| {
2893 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2894 });
2895 project
2896 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
2897 .await
2898 .unwrap();
2899 assert_eq!(
2900 fs.load("/dir/file2".as_ref()).await.unwrap(),
2901 "one\r\ntwo\r\nthree\r\nfour\r\n",
2902 );
2903}
2904
2905#[gpui::test]
2906async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2907 cx.foreground().forbid_parking();
2908
2909 let fs = FakeFs::new(cx.background());
2910 fs.insert_tree(
2911 "/the-dir",
2912 json!({
2913 "a.rs": "
2914 fn foo(mut v: Vec<usize>) {
2915 for x in &v {
2916 v.push(1);
2917 }
2918 }
2919 "
2920 .unindent(),
2921 }),
2922 )
2923 .await;
2924
2925 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2926 let buffer = project
2927 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2928 .await
2929 .unwrap();
2930
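    // Publish diagnostics whose related information links hint diagnostics to
    // their primary diagnostics, so they can be grouped together.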
2931 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2932 let message = lsp::PublishDiagnosticsParams {
2933 uri: buffer_uri.clone(),
2934 diagnostics: vec![
2935 lsp::Diagnostic {
2936 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2937 severity: Some(DiagnosticSeverity::WARNING),
2938 message: "error 1".to_string(),
2939 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2940 location: lsp::Location {
2941 uri: buffer_uri.clone(),
2942 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2943 },
2944 message: "error 1 hint 1".to_string(),
2945 }]),
2946 ..Default::default()
2947 },
2948 lsp::Diagnostic {
2949 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2950 severity: Some(DiagnosticSeverity::HINT),
2951 message: "error 1 hint 1".to_string(),
2952 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2953 location: lsp::Location {
2954 uri: buffer_uri.clone(),
2955 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2956 },
2957 message: "original diagnostic".to_string(),
2958 }]),
2959 ..Default::default()
2960 },
2961 lsp::Diagnostic {
2962 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2963 severity: Some(DiagnosticSeverity::ERROR),
2964 message: "error 2".to_string(),
2965 related_information: Some(vec![
2966 lsp::DiagnosticRelatedInformation {
2967 location: lsp::Location {
2968 uri: buffer_uri.clone(),
2969 range: lsp::Range::new(
2970 lsp::Position::new(1, 13),
2971 lsp::Position::new(1, 15),
2972 ),
2973 },
2974 message: "error 2 hint 1".to_string(),
2975 },
2976 lsp::DiagnosticRelatedInformation {
2977 location: lsp::Location {
2978 uri: buffer_uri.clone(),
2979 range: lsp::Range::new(
2980 lsp::Position::new(1, 13),
2981 lsp::Position::new(1, 15),
2982 ),
2983 },
2984 message: "error 2 hint 2".to_string(),
2985 },
2986 ]),
2987 ..Default::default()
2988 },
2989 lsp::Diagnostic {
2990 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2991 severity: Some(DiagnosticSeverity::HINT),
2992 message: "error 2 hint 1".to_string(),
2993 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2994 location: lsp::Location {
2995 uri: buffer_uri.clone(),
2996 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2997 },
2998 message: "original diagnostic".to_string(),
2999 }]),
3000 ..Default::default()
3001 },
3002 lsp::Diagnostic {
3003 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3004 severity: Some(DiagnosticSeverity::HINT),
3005 message: "error 2 hint 2".to_string(),
3006 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3007 location: lsp::Location {
3008 uri: buffer_uri,
3009 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3010 },
3011 message: "original diagnostic".to_string(),
3012 }]),
3013 ..Default::default()
3014 },
3015 ],
3016 version: None,
3017 };
3018
3019 project
3020 .update(cx, |p, cx| {
3021 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3022 })
3023 .unwrap();
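    // Related diagnostics are grouped: "error 2" and its hints share group 0,
    // while "error 1" and its hint share group 1.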
3024 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3025
3026 assert_eq!(
3027 buffer
3028 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3029 .collect::<Vec<_>>(),
3030 &[
3031 DiagnosticEntry {
3032 range: Point::new(1, 8)..Point::new(1, 9),
3033 diagnostic: Diagnostic {
3034 severity: DiagnosticSeverity::WARNING,
3035 message: "error 1".to_string(),
3036 group_id: 1,
3037 is_primary: true,
3038 ..Default::default()
3039 }
3040 },
3041 DiagnosticEntry {
3042 range: Point::new(1, 8)..Point::new(1, 9),
3043 diagnostic: Diagnostic {
3044 severity: DiagnosticSeverity::HINT,
3045 message: "error 1 hint 1".to_string(),
3046 group_id: 1,
3047 is_primary: false,
3048 ..Default::default()
3049 }
3050 },
3051 DiagnosticEntry {
3052 range: Point::new(1, 13)..Point::new(1, 15),
3053 diagnostic: Diagnostic {
3054 severity: DiagnosticSeverity::HINT,
3055 message: "error 2 hint 1".to_string(),
3056 group_id: 0,
3057 is_primary: false,
3058 ..Default::default()
3059 }
3060 },
3061 DiagnosticEntry {
3062 range: Point::new(1, 13)..Point::new(1, 15),
3063 diagnostic: Diagnostic {
3064 severity: DiagnosticSeverity::HINT,
3065 message: "error 2 hint 2".to_string(),
3066 group_id: 0,
3067 is_primary: false,
3068 ..Default::default()
3069 }
3070 },
3071 DiagnosticEntry {
3072 range: Point::new(2, 8)..Point::new(2, 17),
3073 diagnostic: Diagnostic {
3074 severity: DiagnosticSeverity::ERROR,
3075 message: "error 2".to_string(),
3076 group_id: 0,
3077 is_primary: true,
3078 ..Default::default()
3079 }
3080 }
3081 ]
3082 );
3083
3084 assert_eq!(
3085 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3086 &[
3087 DiagnosticEntry {
3088 range: Point::new(1, 13)..Point::new(1, 15),
3089 diagnostic: Diagnostic {
3090 severity: DiagnosticSeverity::HINT,
3091 message: "error 2 hint 1".to_string(),
3092 group_id: 0,
3093 is_primary: false,
3094 ..Default::default()
3095 }
3096 },
3097 DiagnosticEntry {
3098 range: Point::new(1, 13)..Point::new(1, 15),
3099 diagnostic: Diagnostic {
3100 severity: DiagnosticSeverity::HINT,
3101 message: "error 2 hint 2".to_string(),
3102 group_id: 0,
3103 is_primary: false,
3104 ..Default::default()
3105 }
3106 },
3107 DiagnosticEntry {
3108 range: Point::new(2, 8)..Point::new(2, 17),
3109 diagnostic: Diagnostic {
3110 severity: DiagnosticSeverity::ERROR,
3111 message: "error 2".to_string(),
3112 group_id: 0,
3113 is_primary: true,
3114 ..Default::default()
3115 }
3116 }
3117 ]
3118 );
3119
3120 assert_eq!(
3121 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3122 &[
3123 DiagnosticEntry {
3124 range: Point::new(1, 8)..Point::new(1, 9),
3125 diagnostic: Diagnostic {
3126 severity: DiagnosticSeverity::WARNING,
3127 message: "error 1".to_string(),
3128 group_id: 1,
3129 is_primary: true,
3130 ..Default::default()
3131 }
3132 },
3133 DiagnosticEntry {
3134 range: Point::new(1, 8)..Point::new(1, 9),
3135 diagnostic: Diagnostic {
3136 severity: DiagnosticSeverity::HINT,
3137 message: "error 1 hint 1".to_string(),
3138 group_id: 1,
3139 is_primary: false,
3140 ..Default::default()
3141 }
3142 },
3143 ]
3144 );
3145}
3146
3147#[gpui::test]
3148async fn test_rename(cx: &mut gpui::TestAppContext) {
3149 cx.foreground().forbid_parking();
3150
3151 let mut language = Language::new(
3152 LanguageConfig {
3153 name: "Rust".into(),
3154 path_suffixes: vec!["rs".to_string()],
3155 ..Default::default()
3156 },
3157 Some(tree_sitter_rust::language()),
3158 );
3159 let mut fake_servers = language
3160 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3161 capabilities: lsp::ServerCapabilities {
3162 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3163 prepare_provider: Some(true),
3164 work_done_progress_options: Default::default(),
3165 })),
3166 ..Default::default()
3167 },
3168 ..Default::default()
3169 }))
3170 .await;
3171
3172 let fs = FakeFs::new(cx.background());
3173 fs.insert_tree(
3174 "/dir",
3175 json!({
3176 "one.rs": "const ONE: usize = 1;",
3177 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3178 }),
3179 )
3180 .await;
3181
3182 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3183 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3184 let buffer = project
3185 .update(cx, |project, cx| {
3186 project.open_local_buffer("/dir/one.rs", cx)
3187 })
3188 .await
3189 .unwrap();
3190
3191 let fake_server = fake_servers.next().await.unwrap();
3192
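    // Prepare a rename at the position of `ONE`. The server responds with the
    // range of the symbol to be renamed.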
3193 let response = project.update(cx, |project, cx| {
3194 project.prepare_rename(buffer.clone(), 7, cx)
3195 });
3196 fake_server
3197 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3198 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3199 assert_eq!(params.position, lsp::Position::new(0, 7));
3200 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3201 lsp::Position::new(0, 6),
3202 lsp::Position::new(0, 9),
3203 ))))
3204 })
3205 .next()
3206 .await
3207 .unwrap();
3208 let range = response.await.unwrap().unwrap();
3209 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3210 assert_eq!(range, 6..9);
3211
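    // Perform the rename. The server returns a workspace edit spanning both
    // files, which is applied and returned as a project transaction.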
3212 let response = project.update(cx, |project, cx| {
3213 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3214 });
3215 fake_server
3216 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3217 assert_eq!(
3218 params.text_document_position.text_document.uri.as_str(),
3219 "file:///dir/one.rs"
3220 );
3221 assert_eq!(
3222 params.text_document_position.position,
3223 lsp::Position::new(0, 7)
3224 );
3225 assert_eq!(params.new_name, "THREE");
3226 Ok(Some(lsp::WorkspaceEdit {
3227 changes: Some(
3228 [
3229 (
3230 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3231 vec![lsp::TextEdit::new(
3232 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3233 "THREE".to_string(),
3234 )],
3235 ),
3236 (
3237 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3238 vec![
3239 lsp::TextEdit::new(
3240 lsp::Range::new(
3241 lsp::Position::new(0, 24),
3242 lsp::Position::new(0, 27),
3243 ),
3244 "THREE".to_string(),
3245 ),
3246 lsp::TextEdit::new(
3247 lsp::Range::new(
3248 lsp::Position::new(0, 35),
3249 lsp::Position::new(0, 38),
3250 ),
3251 "THREE".to_string(),
3252 ),
3253 ],
3254 ),
3255 ]
3256 .into_iter()
3257 .collect(),
3258 ),
3259 ..Default::default()
3260 }))
3261 })
3262 .next()
3263 .await
3264 .unwrap();
3265 let mut transaction = response.await.unwrap().0;
3266 assert_eq!(transaction.len(), 2);
3267 assert_eq!(
3268 transaction
3269 .remove_entry(&buffer)
3270 .unwrap()
3271 .0
3272 .read_with(cx, |buffer, _| buffer.text()),
3273 "const THREE: usize = 1;"
3274 );
3275 assert_eq!(
3276 transaction
3277 .into_keys()
3278 .next()
3279 .unwrap()
3280 .read_with(cx, |buffer, _| buffer.text()),
3281 "const TWO: usize = one::THREE + one::THREE;"
3282 );
3283}
3284
3285#[gpui::test]
3286async fn test_search(cx: &mut gpui::TestAppContext) {
3287 let fs = FakeFs::new(cx.background());
3288 fs.insert_tree(
3289 "/dir",
3290 json!({
3291 "one.rs": "const ONE: usize = 1;",
3292 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3293 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3294 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3295 }),
3296 )
3297 .await;
3298 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3299 assert_eq!(
3300 search(&project, SearchQuery::text("TWO", false, true), cx)
3301 .await
3302 .unwrap(),
3303 HashMap::from_iter([
3304 ("two.rs".to_string(), vec![6..9]),
3305 ("three.rs".to_string(), vec![37..40])
3306 ])
3307 );
3308
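    // Edit an open buffer without saving. Subsequent searches should reflect the
    // buffer's unsaved contents rather than the file on disk.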
3309 let buffer_4 = project
3310 .update(cx, |project, cx| {
3311 project.open_local_buffer("/dir/four.rs", cx)
3312 })
3313 .await
3314 .unwrap();
3315 buffer_4.update(cx, |buffer, cx| {
3316 let text = "two::TWO";
3317 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3318 });
3319
3320 assert_eq!(
3321 search(&project, SearchQuery::text("TWO", false, true), cx)
3322 .await
3323 .unwrap(),
3324 HashMap::from_iter([
3325 ("two.rs".to_string(), vec![6..9]),
3326 ("three.rs".to_string(), vec![37..40]),
3327 ("four.rs".to_string(), vec![25..28, 36..39])
3328 ])
3329 );
3330
3331 async fn search(
3332 project: &ModelHandle<Project>,
3333 query: SearchQuery,
3334 cx: &mut gpui::TestAppContext,
3335 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3336 let results = project
3337 .update(cx, |project, cx| project.search(query, cx))
3338 .await?;
3339
3340 Ok(results
3341 .into_iter()
3342 .map(|(buffer, ranges)| {
3343 buffer.read_with(cx, |buffer, _| {
3344 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3345 let ranges = ranges
3346 .into_iter()
3347 .map(|range| range.to_offset(buffer))
3348 .collect::<Vec<_>>();
3349 (path, ranges)
3350 })
3351 })
3352 .collect())
3353 }
3354}