1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
48#[gpui::test]
49async fn test_symlinks(cx: &mut gpui::TestAppContext) {
50 init_test(cx);
51 cx.executor().allow_parking();
52
53 let dir = temp_tree(json!({
54 "root": {
55 "apple": "",
56 "banana": {
57 "carrot": {
58 "date": "",
59 "endive": "",
60 }
61 },
62 "fennel": {
63 "grape": "",
64 }
65 }
66 }));
67
68 let root_link_path = dir.path().join("root_link");
69 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
70 os::unix::fs::symlink(
71 &dir.path().join("root/fennel"),
72 &dir.path().join("root/finnochio"),
73 )
74 .unwrap();
75
76 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
77
78 project.update(cx, |project, cx| {
79 let tree = project.worktrees().next().unwrap().read(cx);
80 assert_eq!(tree.file_count(), 5);
81 assert_eq!(
82 tree.inode_for_path("fennel/grape"),
83 tree.inode_for_path("finnochio/grape")
84 );
85 });
86}
87
88#[gpui::test]
89async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
90 init_test(cx);
91
92 let fs = FakeFs::new(cx.executor());
93 fs.insert_tree(
94 "/the-root",
95 json!({
96 ".zed": {
97 "settings.json": r#"{ "tab_size": 8 }"#
98 },
99 "a": {
100 "a.rs": "fn a() {\n A\n}"
101 },
102 "b": {
103 ".zed": {
104 "settings.json": r#"{ "tab_size": 2 }"#
105 },
106 "b.rs": "fn b() {\n B\n}"
107 }
108 }),
109 )
110 .await;
111
112 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
113 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
114
115 cx.executor().run_until_parked();
116 cx.update(|cx| {
117 let tree = worktree.read(cx);
118
119 let settings_a = language_settings(
120 None,
121 Some(
122 &(File::for_entry(
123 tree.entry_for_path("a/a.rs").unwrap().clone(),
124 worktree.clone(),
125 ) as _),
126 ),
127 cx,
128 );
129 let settings_b = language_settings(
130 None,
131 Some(
132 &(File::for_entry(
133 tree.entry_for_path("b/b.rs").unwrap().clone(),
134 worktree.clone(),
135 ) as _),
136 ),
137 cx,
138 );
139
140 assert_eq!(settings_a.tab_size.get(), 8);
141 assert_eq!(settings_b.tab_size.get(), 2);
142 });
143}
144
145#[gpui::test]
146async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
147 init_test(cx);
148
149 let mut rust_language = Language::new(
150 LanguageConfig {
151 name: "Rust".into(),
152 path_suffixes: vec!["rs".to_string()],
153 ..Default::default()
154 },
155 Some(tree_sitter_rust::language()),
156 );
157 let mut json_language = Language::new(
158 LanguageConfig {
159 name: "JSON".into(),
160 path_suffixes: vec!["json".to_string()],
161 ..Default::default()
162 },
163 None,
164 );
165 let mut fake_rust_servers = rust_language
166 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
167 name: "the-rust-language-server",
168 capabilities: lsp::ServerCapabilities {
169 completion_provider: Some(lsp::CompletionOptions {
170 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
171 ..Default::default()
172 }),
173 ..Default::default()
174 },
175 ..Default::default()
176 }))
177 .await;
178 let mut fake_json_servers = json_language
179 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
180 name: "the-json-language-server",
181 capabilities: lsp::ServerCapabilities {
182 completion_provider: Some(lsp::CompletionOptions {
183 trigger_characters: Some(vec![":".to_string()]),
184 ..Default::default()
185 }),
186 ..Default::default()
187 },
188 ..Default::default()
189 }))
190 .await;
191
192 let fs = FakeFs::new(cx.executor());
193 fs.insert_tree(
194 "/the-root",
195 json!({
196 "test.rs": "const A: i32 = 1;",
197 "test2.rs": "",
198 "Cargo.toml": "a = 1",
199 "package.json": "{\"a\": 1}",
200 }),
201 )
202 .await;
203
204 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
205
206 // Open a buffer without an associated language server.
207 let toml_buffer = project
208 .update(cx, |project, cx| {
209 project.open_local_buffer("/the-root/Cargo.toml", cx)
210 })
211 .await
212 .unwrap();
213
214 // Open a buffer with an associated language server before the language for it has been loaded.
215 let rust_buffer = project
216 .update(cx, |project, cx| {
217 project.open_local_buffer("/the-root/test.rs", cx)
218 })
219 .await
220 .unwrap();
221 rust_buffer.update(cx, |buffer, _| {
222 assert_eq!(buffer.language().map(|l| l.name()), None);
223 });
224
225 // Now we add the languages to the project, and ensure they get assigned to all
226 // the relevant open buffers.
227 project.update(cx, |project, _| {
228 project.languages.add(Arc::new(json_language));
229 project.languages.add(Arc::new(rust_language));
230 });
231 cx.executor().run_until_parked();
232 rust_buffer.update(cx, |buffer, _| {
233 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
234 });
235
236 // A server is started up, and it is notified about Rust files.
237 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
238 assert_eq!(
239 fake_rust_server
240 .receive_notification::<lsp::notification::DidOpenTextDocument>()
241 .await
242 .text_document,
243 lsp::TextDocumentItem {
244 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
245 version: 0,
246 text: "const A: i32 = 1;".to_string(),
247 language_id: Default::default()
248 }
249 );
250
251 // The buffer is configured based on the language server's capabilities.
252 rust_buffer.update(cx, |buffer, _| {
253 assert_eq!(
254 buffer.completion_triggers(),
255 &[".".to_string(), "::".to_string()]
256 );
257 });
258 toml_buffer.update(cx, |buffer, _| {
259 assert!(buffer.completion_triggers().is_empty());
260 });
261
262 // Edit a buffer. The changes are reported to the language server.
263 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
264 assert_eq!(
265 fake_rust_server
266 .receive_notification::<lsp::notification::DidChangeTextDocument>()
267 .await
268 .text_document,
269 lsp::VersionedTextDocumentIdentifier::new(
270 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
271 1
272 )
273 );
274
275 // Open a third buffer with a different associated language server.
276 let json_buffer = project
277 .update(cx, |project, cx| {
278 project.open_local_buffer("/the-root/package.json", cx)
279 })
280 .await
281 .unwrap();
282
283 // A json language server is started up and is only notified about the json buffer.
284 let mut fake_json_server = fake_json_servers.next().await.unwrap();
285 assert_eq!(
286 fake_json_server
287 .receive_notification::<lsp::notification::DidOpenTextDocument>()
288 .await
289 .text_document,
290 lsp::TextDocumentItem {
291 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
292 version: 0,
293 text: "{\"a\": 1}".to_string(),
294 language_id: Default::default()
295 }
296 );
297
298 // This buffer is configured based on the second language server's
299 // capabilities.
300 json_buffer.update(cx, |buffer, _| {
301 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
302 });
303
304 // When opening another buffer whose language server is already running,
305 // it is also configured based on the existing language server's capabilities.
306 let rust_buffer2 = project
307 .update(cx, |project, cx| {
308 project.open_local_buffer("/the-root/test2.rs", cx)
309 })
310 .await
311 .unwrap();
312 rust_buffer2.update(cx, |buffer, _| {
313 assert_eq!(
314 buffer.completion_triggers(),
315 &[".".to_string(), "::".to_string()]
316 );
317 });
318
319 // Changes are reported only to servers matching the buffer's language.
320 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
321 rust_buffer2.update(cx, |buffer, cx| {
322 buffer.edit([(0..0, "let x = 1;")], None, cx)
323 });
324 assert_eq!(
325 fake_rust_server
326 .receive_notification::<lsp::notification::DidChangeTextDocument>()
327 .await
328 .text_document,
329 lsp::VersionedTextDocumentIdentifier::new(
330 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
331 1
332 )
333 );
334
335 // Save notifications are reported to all servers.
336 project
337 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
338 .await
339 .unwrap();
340 assert_eq!(
341 fake_rust_server
342 .receive_notification::<lsp::notification::DidSaveTextDocument>()
343 .await
344 .text_document,
345 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
346 );
347 assert_eq!(
348 fake_json_server
349 .receive_notification::<lsp::notification::DidSaveTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
353 );
354
355 // Renames are reported only to servers matching the buffer's language.
356 fs.rename(
357 Path::new("/the-root/test2.rs"),
358 Path::new("/the-root/test3.rs"),
359 Default::default(),
360 )
361 .await
362 .unwrap();
363 assert_eq!(
364 fake_rust_server
365 .receive_notification::<lsp::notification::DidCloseTextDocument>()
366 .await
367 .text_document,
368 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
369 );
370 assert_eq!(
371 fake_rust_server
372 .receive_notification::<lsp::notification::DidOpenTextDocument>()
373 .await
374 .text_document,
375 lsp::TextDocumentItem {
376 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
377 version: 0,
378 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
379 language_id: Default::default()
380 },
381 );
382
383 rust_buffer2.update(cx, |buffer, cx| {
384 buffer.update_diagnostics(
385 LanguageServerId(0),
386 DiagnosticSet::from_sorted_entries(
387 vec![DiagnosticEntry {
388 diagnostic: Default::default(),
389 range: Anchor::MIN..Anchor::MAX,
390 }],
391 &buffer.snapshot(),
392 ),
393 cx,
394 );
395 assert_eq!(
396 buffer
397 .snapshot()
398 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
399 .count(),
400 1
401 );
402 });
403
404 // When the rename changes the extension of the file, the buffer gets closed on the old
405 // language server and gets opened on the new one.
406 fs.rename(
407 Path::new("/the-root/test3.rs"),
408 Path::new("/the-root/test3.json"),
409 Default::default(),
410 )
411 .await
412 .unwrap();
413 assert_eq!(
414 fake_rust_server
415 .receive_notification::<lsp::notification::DidCloseTextDocument>()
416 .await
417 .text_document,
418 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
419 );
420 assert_eq!(
421 fake_json_server
422 .receive_notification::<lsp::notification::DidOpenTextDocument>()
423 .await
424 .text_document,
425 lsp::TextDocumentItem {
426 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
427 version: 0,
428 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
429 language_id: Default::default()
430 },
431 );
432
433 // We clear the diagnostics, since the language has changed.
434 rust_buffer2.update(cx, |buffer, _| {
435 assert_eq!(
436 buffer
437 .snapshot()
438 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
439 .count(),
440 0
441 );
442 });
443
444 // The renamed file's version resets after changing language server.
445 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
446 assert_eq!(
447 fake_json_server
448 .receive_notification::<lsp::notification::DidChangeTextDocument>()
449 .await
450 .text_document,
451 lsp::VersionedTextDocumentIdentifier::new(
452 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
453 1
454 )
455 );
456
457 // Restart language servers
458 project.update(cx, |project, cx| {
459 project.restart_language_servers_for_buffers(
460 vec![rust_buffer.clone(), json_buffer.clone()],
461 cx,
462 );
463 });
464
465 let mut rust_shutdown_requests = fake_rust_server
466 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
467 let mut json_shutdown_requests = fake_json_server
468 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
469 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
470
471 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
472 let mut fake_json_server = fake_json_servers.next().await.unwrap();
473
474 // Ensure rust document is reopened in new rust language server
475 assert_eq!(
476 fake_rust_server
477 .receive_notification::<lsp::notification::DidOpenTextDocument>()
478 .await
479 .text_document,
480 lsp::TextDocumentItem {
481 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
482 version: 0,
483 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
484 language_id: Default::default()
485 }
486 );
487
488 // Ensure json documents are reopened in new json language server
489 assert_set_eq!(
490 [
491 fake_json_server
492 .receive_notification::<lsp::notification::DidOpenTextDocument>()
493 .await
494 .text_document,
495 fake_json_server
496 .receive_notification::<lsp::notification::DidOpenTextDocument>()
497 .await
498 .text_document,
499 ],
500 [
501 lsp::TextDocumentItem {
502 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
503 version: 0,
504 text: json_buffer.update(cx, |buffer, _| buffer.text()),
505 language_id: Default::default()
506 },
507 lsp::TextDocumentItem {
508 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
509 version: 0,
510 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
511 language_id: Default::default()
512 }
513 ]
514 );
515
516 // Close notifications are reported only to servers matching the buffer's language.
517 cx.update(|_| drop(json_buffer));
518 let close_message = lsp::DidCloseTextDocumentParams {
519 text_document: lsp::TextDocumentIdentifier::new(
520 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
521 ),
522 };
523 assert_eq!(
524 fake_json_server
525 .receive_notification::<lsp::notification::DidCloseTextDocument>()
526 .await,
527 close_message,
528 );
529}
530
531#[gpui::test]
532async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
533 init_test(cx);
534
535 let mut language = Language::new(
536 LanguageConfig {
537 name: "Rust".into(),
538 path_suffixes: vec!["rs".to_string()],
539 ..Default::default()
540 },
541 Some(tree_sitter_rust::language()),
542 );
543 let mut fake_servers = language
544 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
545 name: "the-language-server",
546 ..Default::default()
547 }))
548 .await;
549
550 let fs = FakeFs::new(cx.executor());
551 fs.insert_tree(
552 "/the-root",
553 json!({
554 ".gitignore": "target\n",
555 "src": {
556 "a.rs": "",
557 "b.rs": "",
558 },
559 "target": {
560 "x": {
561 "out": {
562 "x.rs": ""
563 }
564 },
565 "y": {
566 "out": {
567 "y.rs": "",
568 }
569 },
570 "z": {
571 "out": {
572 "z.rs": ""
573 }
574 }
575 }
576 }),
577 )
578 .await;
579
580 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
581 project.update(cx, |project, _| {
582 project.languages.add(Arc::new(language));
583 });
584 cx.executor().run_until_parked();
585
586 // Start the language server by opening a buffer with a compatible file extension.
587 let _buffer = project
588 .update(cx, |project, cx| {
589 project.open_local_buffer("/the-root/src/a.rs", cx)
590 })
591 .await
592 .unwrap();
593
594 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
595 project.update(cx, |project, cx| {
596 let worktree = project.worktrees().next().unwrap();
597 assert_eq!(
598 worktree
599 .read(cx)
600 .snapshot()
601 .entries(true)
602 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
603 .collect::<Vec<_>>(),
604 &[
605 (Path::new(""), false),
606 (Path::new(".gitignore"), false),
607 (Path::new("src"), false),
608 (Path::new("src/a.rs"), false),
609 (Path::new("src/b.rs"), false),
610 (Path::new("target"), true),
611 ]
612 );
613 });
614
615 let prev_read_dir_count = fs.read_dir_call_count();
616
617 // Keep track of the FS events reported to the language server.
618 let fake_server = fake_servers.next().await.unwrap();
619 let file_changes = Arc::new(Mutex::new(Vec::new()));
620 fake_server
621 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
622 registrations: vec![lsp::Registration {
623 id: Default::default(),
624 method: "workspace/didChangeWatchedFiles".to_string(),
625 register_options: serde_json::to_value(
626 lsp::DidChangeWatchedFilesRegistrationOptions {
627 watchers: vec![
628 lsp::FileSystemWatcher {
629 glob_pattern: lsp::GlobPattern::String(
630 "/the-root/Cargo.toml".to_string(),
631 ),
632 kind: None,
633 },
634 lsp::FileSystemWatcher {
635 glob_pattern: lsp::GlobPattern::String(
636 "/the-root/src/*.{rs,c}".to_string(),
637 ),
638 kind: None,
639 },
640 lsp::FileSystemWatcher {
641 glob_pattern: lsp::GlobPattern::String(
642 "/the-root/target/y/**/*.rs".to_string(),
643 ),
644 kind: None,
645 },
646 ],
647 },
648 )
649 .ok(),
650 }],
651 })
652 .await
653 .unwrap();
654 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
655 let file_changes = file_changes.clone();
656 move |params, _| {
657 let mut file_changes = file_changes.lock();
658 file_changes.extend(params.changes);
659 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
660 }
661 });
662
663 cx.executor().run_until_parked();
664 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
665 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
666
667 // Now the language server has asked us to watch an ignored directory path,
668 // so we recursively load it.
669 project.update(cx, |project, cx| {
670 let worktree = project.worktrees().next().unwrap();
671 assert_eq!(
672 worktree
673 .read(cx)
674 .snapshot()
675 .entries(true)
676 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
677 .collect::<Vec<_>>(),
678 &[
679 (Path::new(""), false),
680 (Path::new(".gitignore"), false),
681 (Path::new("src"), false),
682 (Path::new("src/a.rs"), false),
683 (Path::new("src/b.rs"), false),
684 (Path::new("target"), true),
685 (Path::new("target/x"), true),
686 (Path::new("target/y"), true),
687 (Path::new("target/y/out"), true),
688 (Path::new("target/y/out/y.rs"), true),
689 (Path::new("target/z"), true),
690 ]
691 );
692 });
693
694 // Perform some file system mutations, two of which match the watched patterns,
695 // and one of which does not.
696 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
697 .await
698 .unwrap();
699 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
700 .await
701 .unwrap();
702 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
703 .await
704 .unwrap();
705 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
706 .await
707 .unwrap();
708 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
709 .await
710 .unwrap();
711
712 // The language server receives events for the FS mutations that match its watch patterns.
713 cx.executor().run_until_parked();
714 assert_eq!(
715 &*file_changes.lock(),
716 &[
717 lsp::FileEvent {
718 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
719 typ: lsp::FileChangeType::DELETED,
720 },
721 lsp::FileEvent {
722 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
723 typ: lsp::FileChangeType::CREATED,
724 },
725 lsp::FileEvent {
726 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
727 typ: lsp::FileChangeType::CREATED,
728 },
729 ]
730 );
731}
732
733#[gpui::test]
734async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
735 init_test(cx);
736
737 let fs = FakeFs::new(cx.executor());
738 fs.insert_tree(
739 "/dir",
740 json!({
741 "a.rs": "let a = 1;",
742 "b.rs": "let b = 2;"
743 }),
744 )
745 .await;
746
747 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
748
749 let buffer_a = project
750 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
751 .await
752 .unwrap();
753 let buffer_b = project
754 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
755 .await
756 .unwrap();
757
758 project.update(cx, |project, cx| {
759 project
760 .update_diagnostics(
761 LanguageServerId(0),
762 lsp::PublishDiagnosticsParams {
763 uri: Url::from_file_path("/dir/a.rs").unwrap(),
764 version: None,
765 diagnostics: vec![lsp::Diagnostic {
766 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
767 severity: Some(lsp::DiagnosticSeverity::ERROR),
768 message: "error 1".to_string(),
769 ..Default::default()
770 }],
771 },
772 &[],
773 cx,
774 )
775 .unwrap();
776 project
777 .update_diagnostics(
778 LanguageServerId(0),
779 lsp::PublishDiagnosticsParams {
780 uri: Url::from_file_path("/dir/b.rs").unwrap(),
781 version: None,
782 diagnostics: vec![lsp::Diagnostic {
783 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
784 severity: Some(lsp::DiagnosticSeverity::WARNING),
785 message: "error 2".to_string(),
786 ..Default::default()
787 }],
788 },
789 &[],
790 cx,
791 )
792 .unwrap();
793 });
794
795 buffer_a.update(cx, |buffer, _| {
796 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
797 assert_eq!(
798 chunks
799 .iter()
800 .map(|(s, d)| (s.as_str(), *d))
801 .collect::<Vec<_>>(),
802 &[
803 ("let ", None),
804 ("a", Some(DiagnosticSeverity::ERROR)),
805 (" = 1;", None),
806 ]
807 );
808 });
809 buffer_b.update(cx, |buffer, _| {
810 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
811 assert_eq!(
812 chunks
813 .iter()
814 .map(|(s, d)| (s.as_str(), *d))
815 .collect::<Vec<_>>(),
816 &[
817 ("let ", None),
818 ("b", Some(DiagnosticSeverity::WARNING)),
819 (" = 2;", None),
820 ]
821 );
822 });
823}
824
825#[gpui::test]
826async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
827 init_test(cx);
828
829 let fs = FakeFs::new(cx.executor());
830 fs.insert_tree(
831 "/root",
832 json!({
833 "dir": {
834 "a.rs": "let a = 1;",
835 },
836 "other.rs": "let b = c;"
837 }),
838 )
839 .await;
840
841 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
842
843 let (worktree, _) = project
844 .update(cx, |project, cx| {
845 project.find_or_create_local_worktree("/root/other.rs", false, cx)
846 })
847 .await
848 .unwrap();
849 let worktree_id = worktree.update(cx, |tree, _| tree.id());
850
851 project.update(cx, |project, cx| {
852 project
853 .update_diagnostics(
854 LanguageServerId(0),
855 lsp::PublishDiagnosticsParams {
856 uri: Url::from_file_path("/root/other.rs").unwrap(),
857 version: None,
858 diagnostics: vec![lsp::Diagnostic {
859 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
860 severity: Some(lsp::DiagnosticSeverity::ERROR),
861 message: "unknown variable 'c'".to_string(),
862 ..Default::default()
863 }],
864 },
865 &[],
866 cx,
867 )
868 .unwrap();
869 });
870
871 let buffer = project
872 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
873 .await
874 .unwrap();
875 buffer.update(cx, |buffer, _| {
876 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
877 assert_eq!(
878 chunks
879 .iter()
880 .map(|(s, d)| (s.as_str(), *d))
881 .collect::<Vec<_>>(),
882 &[
883 ("let b = ", None),
884 ("c", Some(DiagnosticSeverity::ERROR)),
885 (";", None),
886 ]
887 );
888 });
889
890 project.update(cx, |project, cx| {
891 assert_eq!(project.diagnostic_summaries(cx).next(), None);
892 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
893 });
894}
895
896#[gpui::test]
897async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
898 init_test(cx);
899
900 let progress_token = "the-progress-token";
901 let mut language = Language::new(
902 LanguageConfig {
903 name: "Rust".into(),
904 path_suffixes: vec!["rs".to_string()],
905 ..Default::default()
906 },
907 Some(tree_sitter_rust::language()),
908 );
909 let mut fake_servers = language
910 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
911 disk_based_diagnostics_progress_token: Some(progress_token.into()),
912 disk_based_diagnostics_sources: vec!["disk".into()],
913 ..Default::default()
914 }))
915 .await;
916
917 let fs = FakeFs::new(cx.executor());
918 fs.insert_tree(
919 "/dir",
920 json!({
921 "a.rs": "fn a() { A }",
922 "b.rs": "const y: i32 = 1",
923 }),
924 )
925 .await;
926
927 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
928 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
929 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
930
931 // Cause worktree to start the fake language server
932 let _buffer = project
933 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
934 .await
935 .unwrap();
936
937 let mut events = cx.events(&project);
938
939 let fake_server = fake_servers.next().await.unwrap();
940 assert_eq!(
941 events.next().await.unwrap(),
942 Event::LanguageServerAdded(LanguageServerId(0)),
943 );
944
945 fake_server
946 .start_progress(format!("{}/0", progress_token))
947 .await;
948 assert_eq!(
949 events.next().await.unwrap(),
950 Event::DiskBasedDiagnosticsStarted {
951 language_server_id: LanguageServerId(0),
952 }
953 );
954
955 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
956 uri: Url::from_file_path("/dir/a.rs").unwrap(),
957 version: None,
958 diagnostics: vec![lsp::Diagnostic {
959 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
960 severity: Some(lsp::DiagnosticSeverity::ERROR),
961 message: "undefined variable 'A'".to_string(),
962 ..Default::default()
963 }],
964 });
965 assert_eq!(
966 events.next().await.unwrap(),
967 Event::DiagnosticsUpdated {
968 language_server_id: LanguageServerId(0),
969 path: (worktree_id, Path::new("a.rs")).into()
970 }
971 );
972
973 fake_server.end_progress(format!("{}/0", progress_token));
974 assert_eq!(
975 events.next().await.unwrap(),
976 Event::DiskBasedDiagnosticsFinished {
977 language_server_id: LanguageServerId(0)
978 }
979 );
980
981 let buffer = project
982 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
983 .await
984 .unwrap();
985
986 buffer.update(cx, |buffer, _| {
987 let snapshot = buffer.snapshot();
988 let diagnostics = snapshot
989 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
990 .collect::<Vec<_>>();
991 assert_eq!(
992 diagnostics,
993 &[DiagnosticEntry {
994 range: Point::new(0, 9)..Point::new(0, 10),
995 diagnostic: Diagnostic {
996 severity: lsp::DiagnosticSeverity::ERROR,
997 message: "undefined variable 'A'".to_string(),
998 group_id: 0,
999 is_primary: true,
1000 ..Default::default()
1001 }
1002 }]
1003 )
1004 });
1005
1006 // Ensure publishing empty diagnostics twice only results in one update event.
1007 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1008 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1009 version: None,
1010 diagnostics: Default::default(),
1011 });
1012 assert_eq!(
1013 events.next().await.unwrap(),
1014 Event::DiagnosticsUpdated {
1015 language_server_id: LanguageServerId(0),
1016 path: (worktree_id, Path::new("a.rs")).into()
1017 }
1018 );
1019
1020 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1021 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1022 version: None,
1023 diagnostics: Default::default(),
1024 });
1025 cx.executor().run_until_parked();
1026 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1027}
1028
1029#[gpui::test]
1030async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1031 init_test(cx);
1032
1033 let progress_token = "the-progress-token";
1034 let mut language = Language::new(
1035 LanguageConfig {
1036 path_suffixes: vec!["rs".to_string()],
1037 ..Default::default()
1038 },
1039 None,
1040 );
1041 let mut fake_servers = language
1042 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1043 disk_based_diagnostics_sources: vec!["disk".into()],
1044 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1045 ..Default::default()
1046 }))
1047 .await;
1048
1049 let fs = FakeFs::new(cx.executor());
1050 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1051
1052 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1053 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1054
1055 let buffer = project
1056 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1057 .await
1058 .unwrap();
1059
1060 // Simulate diagnostics starting to update.
1061 let fake_server = fake_servers.next().await.unwrap();
1062 fake_server.start_progress(progress_token).await;
1063
1064 // Restart the server before the diagnostics finish updating.
1065 project.update(cx, |project, cx| {
1066 project.restart_language_servers_for_buffers([buffer], cx);
1067 });
1068 let mut events = cx.events(&project);
1069
1070 // Simulate the newly started server sending more diagnostics.
1071 let fake_server = fake_servers.next().await.unwrap();
1072 assert_eq!(
1073 events.next().await.unwrap(),
1074 Event::LanguageServerAdded(LanguageServerId(1))
1075 );
1076 fake_server.start_progress(progress_token).await;
1077 assert_eq!(
1078 events.next().await.unwrap(),
1079 Event::DiskBasedDiagnosticsStarted {
1080 language_server_id: LanguageServerId(1)
1081 }
1082 );
1083 project.update(cx, |project, _| {
1084 assert_eq!(
1085 project
1086 .language_servers_running_disk_based_diagnostics()
1087 .collect::<Vec<_>>(),
1088 [LanguageServerId(1)]
1089 );
1090 });
1091
1092 // All diagnostics are considered done, despite the old server's diagnostic
1093 // task never completing.
1094 fake_server.end_progress(progress_token);
1095 assert_eq!(
1096 events.next().await.unwrap(),
1097 Event::DiskBasedDiagnosticsFinished {
1098 language_server_id: LanguageServerId(1)
1099 }
1100 );
1101 project.update(cx, |project, _| {
1102 assert_eq!(
1103 project
1104 .language_servers_running_disk_based_diagnostics()
1105 .collect::<Vec<_>>(),
1106 [LanguageServerId(0); 0]
1107 );
1108 });
1109}
1110
1111#[gpui::test]
1112async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1113 init_test(cx);
1114
1115 let mut language = Language::new(
1116 LanguageConfig {
1117 path_suffixes: vec!["rs".to_string()],
1118 ..Default::default()
1119 },
1120 None,
1121 );
1122 let mut fake_servers = language
1123 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1124 ..Default::default()
1125 }))
1126 .await;
1127
1128 let fs = FakeFs::new(cx.executor());
1129 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1130
1131 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1132 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1133
1134 let buffer = project
1135 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1136 .await
1137 .unwrap();
1138
1139 // Publish diagnostics
1140 let fake_server = fake_servers.next().await.unwrap();
1141 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1142 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1143 version: None,
1144 diagnostics: vec![lsp::Diagnostic {
1145 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1146 severity: Some(lsp::DiagnosticSeverity::ERROR),
1147 message: "the message".to_string(),
1148 ..Default::default()
1149 }],
1150 });
1151
1152 cx.executor().run_until_parked();
1153 buffer.update(cx, |buffer, _| {
1154 assert_eq!(
1155 buffer
1156 .snapshot()
1157 .diagnostics_in_range::<_, usize>(0..1, false)
1158 .map(|entry| entry.diagnostic.message.clone())
1159 .collect::<Vec<_>>(),
1160 ["the message".to_string()]
1161 );
1162 });
1163 project.update(cx, |project, cx| {
1164 assert_eq!(
1165 project.diagnostic_summary(cx),
1166 DiagnosticSummary {
1167 error_count: 1,
1168 warning_count: 0,
1169 }
1170 );
1171 });
1172
1173 project.update(cx, |project, cx| {
1174 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1175 });
1176
1177 // The diagnostics are cleared.
1178 cx.executor().run_until_parked();
1179 buffer.update(cx, |buffer, _| {
1180 assert_eq!(
1181 buffer
1182 .snapshot()
1183 .diagnostics_in_range::<_, usize>(0..1, false)
1184 .map(|entry| entry.diagnostic.message.clone())
1185 .collect::<Vec<_>>(),
1186 Vec::<String>::new(),
1187 );
1188 });
1189 project.update(cx, |project, cx| {
1190 assert_eq!(
1191 project.diagnostic_summary(cx),
1192 DiagnosticSummary {
1193 error_count: 0,
1194 warning_count: 0,
1195 }
1196 );
1197 });
1198}
1199
1200#[gpui::test]
1201async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1202 init_test(cx);
1203
1204 let mut language = Language::new(
1205 LanguageConfig {
1206 path_suffixes: vec!["rs".to_string()],
1207 ..Default::default()
1208 },
1209 None,
1210 );
1211 let mut fake_servers = language
1212 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1213 name: "the-lsp",
1214 ..Default::default()
1215 }))
1216 .await;
1217
1218 let fs = FakeFs::new(cx.executor());
1219 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1220
1221 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1222 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1223
1224 let buffer = project
1225 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1226 .await
1227 .unwrap();
1228
1229 // Before restarting the server, report diagnostics with an unknown buffer version.
1230 let fake_server = fake_servers.next().await.unwrap();
1231 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1232 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1233 version: Some(10000),
1234 diagnostics: Vec::new(),
1235 });
1236 cx.executor().run_until_parked();
1237
1238 project.update(cx, |project, cx| {
1239 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1240 });
1241 let mut fake_server = fake_servers.next().await.unwrap();
1242 let notification = fake_server
1243 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1244 .await
1245 .text_document;
1246 assert_eq!(notification.version, 0);
1247}
1248
1249#[gpui::test]
1250async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1251 init_test(cx);
1252
1253 let mut rust = Language::new(
1254 LanguageConfig {
1255 name: Arc::from("Rust"),
1256 path_suffixes: vec!["rs".to_string()],
1257 ..Default::default()
1258 },
1259 None,
1260 );
1261 let mut fake_rust_servers = rust
1262 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1263 name: "rust-lsp",
1264 ..Default::default()
1265 }))
1266 .await;
1267 let mut js = Language::new(
1268 LanguageConfig {
1269 name: Arc::from("JavaScript"),
1270 path_suffixes: vec!["js".to_string()],
1271 ..Default::default()
1272 },
1273 None,
1274 );
1275 let mut fake_js_servers = js
1276 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1277 name: "js-lsp",
1278 ..Default::default()
1279 }))
1280 .await;
1281
1282 let fs = FakeFs::new(cx.executor());
1283 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1284 .await;
1285
1286 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1287 project.update(cx, |project, _| {
1288 project.languages.add(Arc::new(rust));
1289 project.languages.add(Arc::new(js));
1290 });
1291
1292 let _rs_buffer = project
1293 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1294 .await
1295 .unwrap();
1296 let _js_buffer = project
1297 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1298 .await
1299 .unwrap();
1300
1301 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1302 assert_eq!(
1303 fake_rust_server_1
1304 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1305 .await
1306 .text_document
1307 .uri
1308 .as_str(),
1309 "file:///dir/a.rs"
1310 );
1311
1312 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1313 assert_eq!(
1314 fake_js_server
1315 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1316 .await
1317 .text_document
1318 .uri
1319 .as_str(),
1320 "file:///dir/b.js"
1321 );
1322
1323 // Disable Rust language server, ensuring only that server gets stopped.
1324 cx.update(|cx| {
1325 cx.update_global(|settings: &mut SettingsStore, cx| {
1326 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1327 settings.languages.insert(
1328 Arc::from("Rust"),
1329 LanguageSettingsContent {
1330 enable_language_server: Some(false),
1331 ..Default::default()
1332 },
1333 );
1334 });
1335 })
1336 });
1337 fake_rust_server_1
1338 .receive_notification::<lsp::notification::Exit>()
1339 .await;
1340
1341 // Enable Rust and disable JavaScript language servers, ensuring that the
1342 // former gets started again and that the latter stops.
1343 cx.update(|cx| {
1344 cx.update_global(|settings: &mut SettingsStore, cx| {
1345 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1346 settings.languages.insert(
1347 Arc::from("Rust"),
1348 LanguageSettingsContent {
1349 enable_language_server: Some(true),
1350 ..Default::default()
1351 },
1352 );
1353 settings.languages.insert(
1354 Arc::from("JavaScript"),
1355 LanguageSettingsContent {
1356 enable_language_server: Some(false),
1357 ..Default::default()
1358 },
1359 );
1360 });
1361 })
1362 });
1363 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1364 assert_eq!(
1365 fake_rust_server_2
1366 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1367 .await
1368 .text_document
1369 .uri
1370 .as_str(),
1371 "file:///dir/a.rs"
1372 );
1373 fake_js_server
1374 .receive_notification::<lsp::notification::Exit>()
1375 .await;
1376}
1377
1378#[gpui::test(iterations = 3)]
1379async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1380 init_test(cx);
1381
1382 let mut language = Language::new(
1383 LanguageConfig {
1384 name: "Rust".into(),
1385 path_suffixes: vec!["rs".to_string()],
1386 ..Default::default()
1387 },
1388 Some(tree_sitter_rust::language()),
1389 );
1390 let mut fake_servers = language
1391 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1392 disk_based_diagnostics_sources: vec!["disk".into()],
1393 ..Default::default()
1394 }))
1395 .await;
1396
1397 let text = "
1398 fn a() { A }
1399 fn b() { BB }
1400 fn c() { CCC }
1401 "
1402 .unindent();
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1406
1407 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1408 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1409
1410 let buffer = project
1411 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1412 .await
1413 .unwrap();
1414
1415 let mut fake_server = fake_servers.next().await.unwrap();
1416 let open_notification = fake_server
1417 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1418 .await;
1419
1420 // Edit the buffer, moving the content down
1421 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1422 let change_notification_1 = fake_server
1423 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1424 .await;
1425 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1426
1427 // Report some diagnostics for the initial version of the buffer
1428 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1429 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1430 version: Some(open_notification.text_document.version),
1431 diagnostics: vec![
1432 lsp::Diagnostic {
1433 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1434 severity: Some(DiagnosticSeverity::ERROR),
1435 message: "undefined variable 'A'".to_string(),
1436 source: Some("disk".to_string()),
1437 ..Default::default()
1438 },
1439 lsp::Diagnostic {
1440 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1441 severity: Some(DiagnosticSeverity::ERROR),
1442 message: "undefined variable 'BB'".to_string(),
1443 source: Some("disk".to_string()),
1444 ..Default::default()
1445 },
1446 lsp::Diagnostic {
1447 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1448 severity: Some(DiagnosticSeverity::ERROR),
1449 source: Some("disk".to_string()),
1450 message: "undefined variable 'CCC'".to_string(),
1451 ..Default::default()
1452 },
1453 ],
1454 });
1455
1456 // The diagnostics have moved down since they were created.
1457 cx.executor().run_until_parked();
1458 buffer.update(cx, |buffer, _| {
1459 assert_eq!(
1460 buffer
1461 .snapshot()
1462 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1463 .collect::<Vec<_>>(),
1464 &[
1465 DiagnosticEntry {
1466 range: Point::new(3, 9)..Point::new(3, 11),
1467 diagnostic: Diagnostic {
1468 source: Some("disk".into()),
1469 severity: DiagnosticSeverity::ERROR,
1470 message: "undefined variable 'BB'".to_string(),
1471 is_disk_based: true,
1472 group_id: 1,
1473 is_primary: true,
1474 ..Default::default()
1475 },
1476 },
1477 DiagnosticEntry {
1478 range: Point::new(4, 9)..Point::new(4, 12),
1479 diagnostic: Diagnostic {
1480 source: Some("disk".into()),
1481 severity: DiagnosticSeverity::ERROR,
1482 message: "undefined variable 'CCC'".to_string(),
1483 is_disk_based: true,
1484 group_id: 2,
1485 is_primary: true,
1486 ..Default::default()
1487 }
1488 }
1489 ]
1490 );
1491 assert_eq!(
1492 chunks_with_diagnostics(buffer, 0..buffer.len()),
1493 [
1494 ("\n\nfn a() { ".to_string(), None),
1495 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1496 (" }\nfn b() { ".to_string(), None),
1497 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1498 (" }\nfn c() { ".to_string(), None),
1499 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1500 (" }\n".to_string(), None),
1501 ]
1502 );
1503 assert_eq!(
1504 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1505 [
1506 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1507 (" }\nfn c() { ".to_string(), None),
1508 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1509 ]
1510 );
1511 });
1512
1513 // Ensure overlapping diagnostics are highlighted correctly.
1514 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1515 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1516 version: Some(open_notification.text_document.version),
1517 diagnostics: vec![
1518 lsp::Diagnostic {
1519 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1520 severity: Some(DiagnosticSeverity::ERROR),
1521 message: "undefined variable 'A'".to_string(),
1522 source: Some("disk".to_string()),
1523 ..Default::default()
1524 },
1525 lsp::Diagnostic {
1526 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1527 severity: Some(DiagnosticSeverity::WARNING),
1528 message: "unreachable statement".to_string(),
1529 source: Some("disk".to_string()),
1530 ..Default::default()
1531 },
1532 ],
1533 });
1534
1535 cx.executor().run_until_parked();
1536 buffer.update(cx, |buffer, _| {
1537 assert_eq!(
1538 buffer
1539 .snapshot()
1540 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1541 .collect::<Vec<_>>(),
1542 &[
1543 DiagnosticEntry {
1544 range: Point::new(2, 9)..Point::new(2, 12),
1545 diagnostic: Diagnostic {
1546 source: Some("disk".into()),
1547 severity: DiagnosticSeverity::WARNING,
1548 message: "unreachable statement".to_string(),
1549 is_disk_based: true,
1550 group_id: 4,
1551 is_primary: true,
1552 ..Default::default()
1553 }
1554 },
1555 DiagnosticEntry {
1556 range: Point::new(2, 9)..Point::new(2, 10),
1557 diagnostic: Diagnostic {
1558 source: Some("disk".into()),
1559 severity: DiagnosticSeverity::ERROR,
1560 message: "undefined variable 'A'".to_string(),
1561 is_disk_based: true,
1562 group_id: 3,
1563 is_primary: true,
1564 ..Default::default()
1565 },
1566 }
1567 ]
1568 );
1569 assert_eq!(
1570 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1571 [
1572 ("fn a() { ".to_string(), None),
1573 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1574 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1575 ("\n".to_string(), None),
1576 ]
1577 );
1578 assert_eq!(
1579 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1580 [
1581 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1582 ("\n".to_string(), None),
1583 ]
1584 );
1585 });
1586
1587 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1588 // changes since the last save.
1589 buffer.update(cx, |buffer, cx| {
1590 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1591 buffer.edit(
1592 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1593 None,
1594 cx,
1595 );
1596 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1597 });
1598 let change_notification_2 = fake_server
1599 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1600 .await;
1601 assert!(
1602 change_notification_2.text_document.version > change_notification_1.text_document.version
1603 );
1604
1605 // Handle out-of-order diagnostics
1606 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1607 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1608 version: Some(change_notification_2.text_document.version),
1609 diagnostics: vec![
1610 lsp::Diagnostic {
1611 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1612 severity: Some(DiagnosticSeverity::ERROR),
1613 message: "undefined variable 'BB'".to_string(),
1614 source: Some("disk".to_string()),
1615 ..Default::default()
1616 },
1617 lsp::Diagnostic {
1618 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1619 severity: Some(DiagnosticSeverity::WARNING),
1620 message: "undefined variable 'A'".to_string(),
1621 source: Some("disk".to_string()),
1622 ..Default::default()
1623 },
1624 ],
1625 });
1626
1627 cx.executor().run_until_parked();
1628 buffer.update(cx, |buffer, _| {
1629 assert_eq!(
1630 buffer
1631 .snapshot()
1632 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1633 .collect::<Vec<_>>(),
1634 &[
1635 DiagnosticEntry {
1636 range: Point::new(2, 21)..Point::new(2, 22),
1637 diagnostic: Diagnostic {
1638 source: Some("disk".into()),
1639 severity: DiagnosticSeverity::WARNING,
1640 message: "undefined variable 'A'".to_string(),
1641 is_disk_based: true,
1642 group_id: 6,
1643 is_primary: true,
1644 ..Default::default()
1645 }
1646 },
1647 DiagnosticEntry {
1648 range: Point::new(3, 9)..Point::new(3, 14),
1649 diagnostic: Diagnostic {
1650 source: Some("disk".into()),
1651 severity: DiagnosticSeverity::ERROR,
1652 message: "undefined variable 'BB'".to_string(),
1653 is_disk_based: true,
1654 group_id: 5,
1655 is_primary: true,
1656 ..Default::default()
1657 },
1658 }
1659 ]
1660 );
1661 });
1662}
1663
1664#[gpui::test]
1665async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1666 init_test(cx);
1667
1668 let text = concat!(
1669 "let one = ;\n", //
1670 "let two = \n",
1671 "let three = 3;\n",
1672 );
1673
1674 let fs = FakeFs::new(cx.executor());
1675 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1676
1677 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1678 let buffer = project
1679 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1680 .await
1681 .unwrap();
1682
1683 project.update(cx, |project, cx| {
1684 project
1685 .update_buffer_diagnostics(
1686 &buffer,
1687 LanguageServerId(0),
1688 None,
1689 vec![
1690 DiagnosticEntry {
1691 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1692 diagnostic: Diagnostic {
1693 severity: DiagnosticSeverity::ERROR,
1694 message: "syntax error 1".to_string(),
1695 ..Default::default()
1696 },
1697 },
1698 DiagnosticEntry {
1699 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1700 diagnostic: Diagnostic {
1701 severity: DiagnosticSeverity::ERROR,
1702 message: "syntax error 2".to_string(),
1703 ..Default::default()
1704 },
1705 },
1706 ],
1707 cx,
1708 )
1709 .unwrap();
1710 });
1711
1712 // An empty range is extended forward to include the following character.
1713 // At the end of a line, an empty range is extended backward to include
1714 // the preceding character.
1715 buffer.update(cx, |buffer, _| {
1716 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1717 assert_eq!(
1718 chunks
1719 .iter()
1720 .map(|(s, d)| (s.as_str(), *d))
1721 .collect::<Vec<_>>(),
1722 &[
1723 ("let one = ", None),
1724 (";", Some(DiagnosticSeverity::ERROR)),
1725 ("\nlet two =", None),
1726 (" ", Some(DiagnosticSeverity::ERROR)),
1727 ("\nlet three = 3;\n", None)
1728 ]
1729 );
1730 });
1731}
1732
1733#[gpui::test]
1734async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1735 init_test(cx);
1736
1737 let fs = FakeFs::new(cx.executor());
1738 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1739 .await;
1740
1741 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1742
1743 project.update(cx, |project, cx| {
1744 project
1745 .update_diagnostic_entries(
1746 LanguageServerId(0),
1747 Path::new("/dir/a.rs").to_owned(),
1748 None,
1749 vec![DiagnosticEntry {
1750 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1751 diagnostic: Diagnostic {
1752 severity: DiagnosticSeverity::ERROR,
1753 is_primary: true,
1754 message: "syntax error a1".to_string(),
1755 ..Default::default()
1756 },
1757 }],
1758 cx,
1759 )
1760 .unwrap();
1761 project
1762 .update_diagnostic_entries(
1763 LanguageServerId(1),
1764 Path::new("/dir/a.rs").to_owned(),
1765 None,
1766 vec![DiagnosticEntry {
1767 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1768 diagnostic: Diagnostic {
1769 severity: DiagnosticSeverity::ERROR,
1770 is_primary: true,
1771 message: "syntax error b1".to_string(),
1772 ..Default::default()
1773 },
1774 }],
1775 cx,
1776 )
1777 .unwrap();
1778
1779 assert_eq!(
1780 project.diagnostic_summary(cx),
1781 DiagnosticSummary {
1782 error_count: 2,
1783 warning_count: 0,
1784 }
1785 );
1786 });
1787}
1788
1789#[gpui::test]
1790async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1791 init_test(cx);
1792
1793 let mut language = Language::new(
1794 LanguageConfig {
1795 name: "Rust".into(),
1796 path_suffixes: vec!["rs".to_string()],
1797 ..Default::default()
1798 },
1799 Some(tree_sitter_rust::language()),
1800 );
1801 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1802
1803 let text = "
1804 fn a() {
1805 f1();
1806 }
1807 fn b() {
1808 f2();
1809 }
1810 fn c() {
1811 f3();
1812 }
1813 "
1814 .unindent();
1815
1816 let fs = FakeFs::new(cx.executor());
1817 fs.insert_tree(
1818 "/dir",
1819 json!({
1820 "a.rs": text.clone(),
1821 }),
1822 )
1823 .await;
1824
1825 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1826 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1827 let buffer = project
1828 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1829 .await
1830 .unwrap();
1831
1832 let mut fake_server = fake_servers.next().await.unwrap();
1833 let lsp_document_version = fake_server
1834 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1835 .await
1836 .text_document
1837 .version;
1838
1839 // Simulate editing the buffer after the language server computes some edits.
1840 buffer.update(cx, |buffer, cx| {
1841 buffer.edit(
1842 [(
1843 Point::new(0, 0)..Point::new(0, 0),
1844 "// above first function\n",
1845 )],
1846 None,
1847 cx,
1848 );
1849 buffer.edit(
1850 [(
1851 Point::new(2, 0)..Point::new(2, 0),
1852 " // inside first function\n",
1853 )],
1854 None,
1855 cx,
1856 );
1857 buffer.edit(
1858 [(
1859 Point::new(6, 4)..Point::new(6, 4),
1860 "// inside second function ",
1861 )],
1862 None,
1863 cx,
1864 );
1865
1866 assert_eq!(
1867 buffer.text(),
1868 "
1869 // above first function
1870 fn a() {
1871 // inside first function
1872 f1();
1873 }
1874 fn b() {
1875 // inside second function f2();
1876 }
1877 fn c() {
1878 f3();
1879 }
1880 "
1881 .unindent()
1882 );
1883 });
1884
1885 let edits = project
1886 .update(cx, |project, cx| {
1887 project.edits_from_lsp(
1888 &buffer,
1889 vec![
1890 // replace body of first function
1891 lsp::TextEdit {
1892 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1893 new_text: "
1894 fn a() {
1895 f10();
1896 }
1897 "
1898 .unindent(),
1899 },
1900 // edit inside second function
1901 lsp::TextEdit {
1902 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1903 new_text: "00".into(),
1904 },
1905 // edit inside third function via two distinct edits
1906 lsp::TextEdit {
1907 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1908 new_text: "4000".into(),
1909 },
1910 lsp::TextEdit {
1911 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1912 new_text: "".into(),
1913 },
1914 ],
1915 LanguageServerId(0),
1916 Some(lsp_document_version),
1917 cx,
1918 )
1919 })
1920 .await
1921 .unwrap();
1922
1923 buffer.update(cx, |buffer, cx| {
1924 for (range, new_text) in edits {
1925 buffer.edit([(range, new_text)], None, cx);
1926 }
1927 assert_eq!(
1928 buffer.text(),
1929 "
1930 // above first function
1931 fn a() {
1932 // inside first function
1933 f10();
1934 }
1935 fn b() {
1936 // inside second function f200();
1937 }
1938 fn c() {
1939 f4000();
1940 }
1941 "
1942 .unindent()
1943 );
1944 });
1945}
1946
1947#[gpui::test]
1948async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1949 init_test(cx);
1950
1951 let text = "
1952 use a::b;
1953 use a::c;
1954
1955 fn f() {
1956 b();
1957 c();
1958 }
1959 "
1960 .unindent();
1961
1962 let fs = FakeFs::new(cx.executor());
1963 fs.insert_tree(
1964 "/dir",
1965 json!({
1966 "a.rs": text.clone(),
1967 }),
1968 )
1969 .await;
1970
1971 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1972 let buffer = project
1973 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1974 .await
1975 .unwrap();
1976
1977 // Simulate the language server sending us a small edit in the form of a very large diff.
1978 // Rust-analyzer does this when performing a merge-imports code action.
1979 let edits = project
1980 .update(cx, |project, cx| {
1981 project.edits_from_lsp(
1982 &buffer,
1983 [
1984 // Replace the first use statement without editing the semicolon.
1985 lsp::TextEdit {
1986 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1987 new_text: "a::{b, c}".into(),
1988 },
1989 // Reinsert the remainder of the file between the semicolon and the final
1990 // newline of the file.
1991 lsp::TextEdit {
1992 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1993 new_text: "\n\n".into(),
1994 },
1995 lsp::TextEdit {
1996 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1997 new_text: "
1998 fn f() {
1999 b();
2000 c();
2001 }"
2002 .unindent(),
2003 },
2004 // Delete everything after the first newline of the file.
2005 lsp::TextEdit {
2006 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2007 new_text: "".into(),
2008 },
2009 ],
2010 LanguageServerId(0),
2011 None,
2012 cx,
2013 )
2014 })
2015 .await
2016 .unwrap();
2017
2018 buffer.update(cx, |buffer, cx| {
2019 let edits = edits
2020 .into_iter()
2021 .map(|(range, text)| {
2022 (
2023 range.start.to_point(buffer)..range.end.to_point(buffer),
2024 text,
2025 )
2026 })
2027 .collect::<Vec<_>>();
2028
2029 assert_eq!(
2030 edits,
2031 [
2032 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2033 (Point::new(1, 0)..Point::new(2, 0), "".into())
2034 ]
2035 );
2036
2037 for (range, new_text) in edits {
2038 buffer.edit([(range, new_text)], None, cx);
2039 }
2040 assert_eq!(
2041 buffer.text(),
2042 "
2043 use a::{b, c};
2044
2045 fn f() {
2046 b();
2047 c();
2048 }
2049 "
2050 .unindent()
2051 );
2052 });
2053}
2054
2055#[gpui::test]
2056async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2057 init_test(cx);
2058
2059 let text = "
2060 use a::b;
2061 use a::c;
2062
2063 fn f() {
2064 b();
2065 c();
2066 }
2067 "
2068 .unindent();
2069
2070 let fs = FakeFs::new(cx.executor());
2071 fs.insert_tree(
2072 "/dir",
2073 json!({
2074 "a.rs": text.clone(),
2075 }),
2076 )
2077 .await;
2078
2079 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2080 let buffer = project
2081 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2082 .await
2083 .unwrap();
2084
    // Simulate the language server sending edits out of order, with some ranges
    // inverted and some pointing to invalid locations beyond the end of the file.
2087 let edits = project
2088 .update(cx, |project, cx| {
2089 project.edits_from_lsp(
2090 &buffer,
2091 [
2092 lsp::TextEdit {
2093 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2094 new_text: "\n\n".into(),
2095 },
2096 lsp::TextEdit {
2097 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2098 new_text: "a::{b, c}".into(),
2099 },
2100 lsp::TextEdit {
2101 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2102 new_text: "".into(),
2103 },
2104 lsp::TextEdit {
2105 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2106 new_text: "
2107 fn f() {
2108 b();
2109 c();
2110 }"
2111 .unindent(),
2112 },
2113 ],
2114 LanguageServerId(0),
2115 None,
2116 cx,
2117 )
2118 })
2119 .await
2120 .unwrap();
2121
2122 buffer.update(cx, |buffer, cx| {
2123 let edits = edits
2124 .into_iter()
2125 .map(|(range, text)| {
2126 (
2127 range.start.to_point(buffer)..range.end.to_point(buffer),
2128 text,
2129 )
2130 })
2131 .collect::<Vec<_>>();
2132
2133 assert_eq!(
2134 edits,
2135 [
2136 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2137 (Point::new(1, 0)..Point::new(2, 0), "".into())
2138 ]
2139 );
2140
2141 for (range, new_text) in edits {
2142 buffer.edit([(range, new_text)], None, cx);
2143 }
2144 assert_eq!(
2145 buffer.text(),
2146 "
2147 use a::{b, c};
2148
2149 fn f() {
2150 b();
2151 c();
2152 }
2153 "
2154 .unindent()
2155 );
2156 });
2157}
2158
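// Collects the chunks of `buffer` within `range`, merging adjacent chunks that share
// the same diagnostic severity.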
2159fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2160 buffer: &Buffer,
2161 range: Range<T>,
2162) -> Vec<(String, Option<DiagnosticSeverity>)> {
2163 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2164 for chunk in buffer.snapshot().chunks(range, true) {
2165 if chunks.last().map_or(false, |prev_chunk| {
2166 prev_chunk.1 == chunk.diagnostic_severity
2167 }) {
2168 chunks.last_mut().unwrap().0.push_str(chunk.text);
2169 } else {
2170 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2171 }
2172 }
2173 chunks
2174}
2175
2176#[gpui::test(iterations = 10)]
2177async fn test_definition(cx: &mut gpui::TestAppContext) {
2178 init_test(cx);
2179
2180 let mut language = Language::new(
2181 LanguageConfig {
2182 name: "Rust".into(),
2183 path_suffixes: vec!["rs".to_string()],
2184 ..Default::default()
2185 },
2186 Some(tree_sitter_rust::language()),
2187 );
2188 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2189
2190 let fs = FakeFs::new(cx.executor());
2191 fs.insert_tree(
2192 "/dir",
2193 json!({
2194 "a.rs": "const fn a() { A }",
2195 "b.rs": "const y: i32 = crate::a()",
2196 }),
2197 )
2198 .await;
2199
2200 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2201 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2202
2203 let buffer = project
2204 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2205 .await
2206 .unwrap();
2207
2208 let fake_server = fake_servers.next().await.unwrap();
2209 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2210 let params = params.text_document_position_params;
2211 assert_eq!(
2212 params.text_document.uri.to_file_path().unwrap(),
2213 Path::new("/dir/b.rs"),
2214 );
2215 assert_eq!(params.position, lsp::Position::new(0, 22));
2216
2217 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2218 lsp::Location::new(
2219 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2220 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2221 ),
2222 )))
2223 });
2224
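    // Request the definition of the symbol at offset 22 in b.rs (the `a` in `crate::a()`).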
2225 let mut definitions = project
2226 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2227 .await
2228 .unwrap();
2229
    // Assert that no new language server was started.
2231 cx.executor().run_until_parked();
2232 assert!(fake_servers.try_next().is_err());
2233
2234 assert_eq!(definitions.len(), 1);
2235 let definition = definitions.pop().unwrap();
2236 cx.update(|cx| {
2237 let target_buffer = definition.target.buffer.read(cx);
2238 assert_eq!(
2239 target_buffer
2240 .file()
2241 .unwrap()
2242 .as_local()
2243 .unwrap()
2244 .abs_path(cx),
2245 Path::new("/dir/a.rs"),
2246 );
2247 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2248 assert_eq!(
2249 list_worktrees(&project, cx),
2250 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2251 );
2252
2253 drop(definition);
2254 });
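    // Dropping the definition releases the invisible worktree that was created for a.rs.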
2255 cx.update(|cx| {
2256 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2257 });
2258
2259 fn list_worktrees<'a>(
2260 project: &'a Model<Project>,
2261 cx: &'a AppContext,
2262 ) -> Vec<(&'a Path, bool)> {
2263 project
2264 .read(cx)
2265 .worktrees()
2266 .map(|worktree| {
2267 let worktree = worktree.read(cx);
2268 (
2269 worktree.as_local().unwrap().abs_path().as_ref(),
2270 worktree.is_visible(),
2271 )
2272 })
2273 .collect::<Vec<_>>()
2274 }
2275}
2276
2277#[gpui::test]
2278async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2279 init_test(cx);
2280
2281 let mut language = Language::new(
2282 LanguageConfig {
2283 name: "TypeScript".into(),
2284 path_suffixes: vec!["ts".to_string()],
2285 ..Default::default()
2286 },
2287 Some(tree_sitter_typescript::language_typescript()),
2288 );
2289 let mut fake_language_servers = language
2290 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2291 capabilities: lsp::ServerCapabilities {
2292 completion_provider: Some(lsp::CompletionOptions {
2293 trigger_characters: Some(vec![":".to_string()]),
2294 ..Default::default()
2295 }),
2296 ..Default::default()
2297 },
2298 ..Default::default()
2299 }))
2300 .await;
2301
2302 let fs = FakeFs::new(cx.executor());
2303 fs.insert_tree(
2304 "/dir",
2305 json!({
2306 "a.ts": "",
2307 }),
2308 )
2309 .await;
2310
2311 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2312 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2313 let buffer = project
2314 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2315 .await
2316 .unwrap();
2317
2318 let fake_server = fake_language_servers.next().await.unwrap();
2319
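    // The server's completion items have no edit ranges, so the replaced range must be
    // inferred from the word preceding the cursor.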
2320 let text = "let a = b.fqn";
2321 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2322 let completions = project.update(cx, |project, cx| {
2323 project.completions(&buffer, text.len(), cx)
2324 });
2325
2326 fake_server
2327 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2328 Ok(Some(lsp::CompletionResponse::Array(vec![
2329 lsp::CompletionItem {
2330 label: "fullyQualifiedName?".into(),
2331 insert_text: Some("fullyQualifiedName".into()),
2332 ..Default::default()
2333 },
2334 ])))
2335 })
2336 .next()
2337 .await;
2338 let completions = completions.await.unwrap();
2339 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2340 assert_eq!(completions.len(), 1);
2341 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2342 assert_eq!(
2343 completions[0].old_range.to_offset(&snapshot),
2344 text.len() - 3..text.len()
2345 );
2346
2347 let text = "let a = \"atoms/cmp\"";
2348 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2349 let completions = project.update(cx, |project, cx| {
2350 project.completions(&buffer, text.len() - 1, cx)
2351 });
2352
2353 fake_server
2354 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2355 Ok(Some(lsp::CompletionResponse::Array(vec![
2356 lsp::CompletionItem {
2357 label: "component".into(),
2358 ..Default::default()
2359 },
2360 ])))
2361 })
2362 .next()
2363 .await;
2364 let completions = completions.await.unwrap();
2365 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2366 assert_eq!(completions.len(), 1);
2367 assert_eq!(completions[0].new_text, "component");
2368 assert_eq!(
2369 completions[0].old_range.to_offset(&snapshot),
2370 text.len() - 4..text.len() - 1
2371 );
2372}
2373
2374#[gpui::test]
2375async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2376 init_test(cx);
2377
2378 let mut language = Language::new(
2379 LanguageConfig {
2380 name: "TypeScript".into(),
2381 path_suffixes: vec!["ts".to_string()],
2382 ..Default::default()
2383 },
2384 Some(tree_sitter_typescript::language_typescript()),
2385 );
2386 let mut fake_language_servers = language
2387 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2388 capabilities: lsp::ServerCapabilities {
2389 completion_provider: Some(lsp::CompletionOptions {
2390 trigger_characters: Some(vec![":".to_string()]),
2391 ..Default::default()
2392 }),
2393 ..Default::default()
2394 },
2395 ..Default::default()
2396 }))
2397 .await;
2398
2399 let fs = FakeFs::new(cx.executor());
2400 fs.insert_tree(
2401 "/dir",
2402 json!({
2403 "a.ts": "",
2404 }),
2405 )
2406 .await;
2407
2408 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2409 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2410 let buffer = project
2411 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2412 .await
2413 .unwrap();
2414
2415 let fake_server = fake_language_servers.next().await.unwrap();
2416
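    // The completion's insert text contains carriage returns, which should be normalized
    // to plain newlines in the resulting completion.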
2417 let text = "let a = b.fqn";
2418 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2419 let completions = project.update(cx, |project, cx| {
2420 project.completions(&buffer, text.len(), cx)
2421 });
2422
2423 fake_server
2424 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2425 Ok(Some(lsp::CompletionResponse::Array(vec![
2426 lsp::CompletionItem {
2427 label: "fullyQualifiedName?".into(),
2428 insert_text: Some("fully\rQualified\r\nName".into()),
2429 ..Default::default()
2430 },
2431 ])))
2432 })
2433 .next()
2434 .await;
2435 let completions = completions.await.unwrap();
2436 assert_eq!(completions.len(), 1);
2437 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2438}
2439
2440#[gpui::test(iterations = 10)]
2441async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2442 init_test(cx);
2443
2444 let mut language = Language::new(
2445 LanguageConfig {
2446 name: "TypeScript".into(),
2447 path_suffixes: vec!["ts".to_string()],
2448 ..Default::default()
2449 },
2450 None,
2451 );
2452 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2453
2454 let fs = FakeFs::new(cx.executor());
2455 fs.insert_tree(
2456 "/dir",
2457 json!({
2458 "a.ts": "a",
2459 }),
2460 )
2461 .await;
2462
2463 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2464 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2465 let buffer = project
2466 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2467 .await
2468 .unwrap();
2469
2470 let fake_server = fake_language_servers.next().await.unwrap();
2471
    // The language server returns code actions that contain commands, not edits.
2473 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2474 fake_server
2475 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2476 Ok(Some(vec![
2477 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2478 title: "The code action".into(),
2479 command: Some(lsp::Command {
2480 title: "The command".into(),
2481 command: "_the/command".into(),
2482 arguments: Some(vec![json!("the-argument")]),
2483 }),
2484 ..Default::default()
2485 }),
2486 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2487 title: "two".into(),
2488 ..Default::default()
2489 }),
2490 ]))
2491 })
2492 .next()
2493 .await;
2494
2495 let action = actions.await.unwrap()[0].clone();
2496 let apply = project.update(cx, |project, cx| {
2497 project.apply_code_action(buffer.clone(), action, true, cx)
2498 });
2499
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2502 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2503 |action, _| async move { Ok(action) },
2504 );
2505
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2508 fake_server
2509 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2510 let fake = fake_server.clone();
2511 move |params, _| {
2512 assert_eq!(params.command, "_the/command");
2513 let fake = fake.clone();
2514 async move {
2515 fake.server
2516 .request::<lsp::request::ApplyWorkspaceEdit>(
2517 lsp::ApplyWorkspaceEditParams {
2518 label: None,
2519 edit: lsp::WorkspaceEdit {
2520 changes: Some(
2521 [(
2522 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2523 vec![lsp::TextEdit {
2524 range: lsp::Range::new(
2525 lsp::Position::new(0, 0),
2526 lsp::Position::new(0, 0),
2527 ),
2528 new_text: "X".into(),
2529 }],
2530 )]
2531 .into_iter()
2532 .collect(),
2533 ),
2534 ..Default::default()
2535 },
2536 },
2537 )
2538 .await
2539 .unwrap();
2540 Ok(Some(json!(null)))
2541 }
2542 }
2543 })
2544 .next()
2545 .await;
2546
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2549 let transaction = apply.await.unwrap();
2550 assert!(transaction.0.contains_key(&buffer));
2551 buffer.update(cx, |buffer, cx| {
2552 assert_eq!(buffer.text(), "Xa");
2553 buffer.undo(cx);
2554 assert_eq!(buffer.text(), "a");
2555 });
2556}
2557
2558#[gpui::test(iterations = 10)]
2559async fn test_save_file(cx: &mut gpui::TestAppContext) {
2560 init_test(cx);
2561
2562 let fs = FakeFs::new(cx.executor());
2563 fs.insert_tree(
2564 "/dir",
2565 json!({
2566 "file1": "the old contents",
2567 }),
2568 )
2569 .await;
2570
2571 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2572 let buffer = project
2573 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2574 .await
2575 .unwrap();
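    // Verify the initial contents, then insert a large amount of text before saving.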
2576 buffer.update(cx, |buffer, cx| {
2577 assert_eq!(buffer.text(), "the old contents");
2578 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2579 });
2580
2581 project
2582 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2583 .await
2584 .unwrap();
2585
2586 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2587 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2588}
2589
2590#[gpui::test(iterations = 30)]
2591async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2592 init_test(cx);
2593
2594 let fs = FakeFs::new(cx.executor().clone());
2595 fs.insert_tree(
2596 "/dir",
2597 json!({
2598 "file1": "the original contents",
2599 }),
2600 )
2601 .await;
2602
2603 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2604 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2605 let buffer = project
2606 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2607 .await
2608 .unwrap();
2609
2610 // Simulate buffer diffs being slow, so that they don't complete before
2611 // the next file change occurs.
2612 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2613
2614 // Change the buffer's file on disk, and then wait for the file change
2615 // to be detected by the worktree, so that the buffer starts reloading.
2616 fs.save(
2617 "/dir/file1".as_ref(),
2618 &"the first contents".into(),
2619 Default::default(),
2620 )
2621 .await
2622 .unwrap();
2623 worktree.next_event(cx);
2624
2625 // Change the buffer's file again. Depending on the random seed, the
2626 // previous file change may still be in progress.
2627 fs.save(
2628 "/dir/file1".as_ref(),
2629 &"the second contents".into(),
2630 Default::default(),
2631 )
2632 .await
2633 .unwrap();
2634 worktree.next_event(cx);
2635
2636 cx.executor().run_until_parked();
2637 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2638 buffer.read_with(cx, |buffer, _| {
2639 let buffer_text = buffer.text();
2640 if buffer_text == on_disk_text {
2641 assert!(
2642 !buffer.is_dirty() && !buffer.has_conflict(),
2643 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2644 );
2645 }
2646 // If the file change occurred while the buffer was processing the first
2647 // change, the buffer will be in a conflicting state.
2648 else {
2649 assert!(
2650 buffer.is_dirty() && buffer.has_conflict(),
2651 "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}"
2652 );
2653 }
2654 });
2655}
2656
2657#[gpui::test]
2658async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2659 init_test(cx);
2660
2661 let fs = FakeFs::new(cx.executor());
2662 fs.insert_tree(
2663 "/dir",
2664 json!({
2665 "file1": "the old contents",
2666 }),
2667 )
2668 .await;
2669
2670 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2671 let buffer = project
2672 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2673 .await
2674 .unwrap();
2675 buffer.update(cx, |buffer, cx| {
2676 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2677 });
2678
2679 project
2680 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2681 .await
2682 .unwrap();
2683
2684 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2685 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2686}
2687
2688#[gpui::test]
2689async fn test_save_as(cx: &mut gpui::TestAppContext) {
2690 init_test(cx);
2691
2692 let fs = FakeFs::new(cx.executor());
2693 fs.insert_tree("/dir", json!({})).await;
2694
2695 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2696
2697 let languages = project.update(cx, |project, _| project.languages().clone());
2698 languages.register(
2699 "/some/path",
2700 LanguageConfig {
2701 name: "Rust".into(),
2702 path_suffixes: vec!["rs".into()],
2703 ..Default::default()
2704 },
2705 tree_sitter_rust::language(),
2706 vec![],
2707 |_| Default::default(),
2708 );
2709
2710 let buffer = project.update(cx, |project, cx| {
2711 project.create_buffer("", None, cx).unwrap()
2712 });
2713 buffer.update(cx, |buffer, cx| {
2714 buffer.edit([(0..0, "abc")], None, cx);
2715 assert!(buffer.is_dirty());
2716 assert!(!buffer.has_conflict());
2717 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2718 });
2719 project
2720 .update(cx, |project, cx| {
2721 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2722 })
2723 .await
2724 .unwrap();
2725 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2726
2727 cx.executor().run_until_parked();
2728 buffer.update(cx, |buffer, cx| {
2729 assert_eq!(
2730 buffer.file().unwrap().full_path(cx),
2731 Path::new("dir/file1.rs")
2732 );
2733 assert!(!buffer.is_dirty());
2734 assert!(!buffer.has_conflict());
2735 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2736 });
2737
2738 let opened_buffer = project
2739 .update(cx, |project, cx| {
2740 project.open_local_buffer("/dir/file1.rs", cx)
2741 })
2742 .await
2743 .unwrap();
2744 assert_eq!(opened_buffer, buffer);
2745}
2746
2747#[gpui::test(retries = 5)]
2748async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2749 init_test(cx);
2750 cx.executor().allow_parking();
2751
2752 let dir = temp_tree(json!({
2753 "a": {
2754 "file1": "",
2755 "file2": "",
2756 "file3": "",
2757 },
2758 "b": {
2759 "c": {
2760 "file4": "",
2761 "file5": "",
2762 }
2763 }
2764 }));
2765
2766 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2767 let rpc = project.update(cx, |p, _| p.client.clone());
2768
2769 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2770 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2771 async move { buffer.await.unwrap() }
2772 };
2773 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2774 project.update(cx, |project, cx| {
2775 let tree = project.worktrees().next().unwrap();
2776 tree.read(cx)
2777 .entry_for_path(path)
2778 .unwrap_or_else(|| panic!("no entry for path {}", path))
2779 .id
2780 })
2781 };
2782
2783 let buffer2 = buffer_for_path("a/file2", cx).await;
2784 let buffer3 = buffer_for_path("a/file3", cx).await;
2785 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2786 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2787
2788 let file2_id = id_for_path("a/file2", cx);
2789 let file3_id = id_for_path("a/file3", cx);
2790 let file4_id = id_for_path("b/c/file4", cx);
2791
2792 // Create a remote copy of this worktree.
2793 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2794
2795 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2796
2797 let updates = Arc::new(Mutex::new(Vec::new()));
2798 tree.update(cx, |tree, cx| {
2799 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2800 let updates = updates.clone();
2801 move |update| {
2802 updates.lock().push(update);
2803 async { true }
2804 }
2805 });
2806 });
2807
2808 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2809
2810 cx.executor().run_until_parked();
2811
2812 cx.update(|cx| {
2813 assert!(!buffer2.read(cx).is_dirty());
2814 assert!(!buffer3.read(cx).is_dirty());
2815 assert!(!buffer4.read(cx).is_dirty());
2816 assert!(!buffer5.read(cx).is_dirty());
2817 });
2818
2819 // Rename and delete files and directories.
2820 tree.flush_fs_events(cx).await;
2821 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2822 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2823 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2824 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2825 tree.flush_fs_events(cx).await;
2826
2827 let expected_paths = vec![
2828 "a",
2829 "a/file1",
2830 "a/file2.new",
2831 "b",
2832 "d",
2833 "d/file3",
2834 "d/file4",
2835 ];
2836
2837 cx.update(|app| {
2838 assert_eq!(
2839 tree.read(app)
2840 .paths()
2841 .map(|p| p.to_str().unwrap())
2842 .collect::<Vec<_>>(),
2843 expected_paths
2844 );
2845 });
2846
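    // Entry ids are preserved for entries that were renamed or moved.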
2847 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2848 assert_eq!(id_for_path("d/file3", cx), file3_id);
2849 assert_eq!(id_for_path("d/file4", cx), file4_id);
2850
2851 cx.update(|cx| {
2852 assert_eq!(
2853 buffer2.read(cx).file().unwrap().path().as_ref(),
2854 Path::new("a/file2.new")
2855 );
2856 assert_eq!(
2857 buffer3.read(cx).file().unwrap().path().as_ref(),
2858 Path::new("d/file3")
2859 );
2860 assert_eq!(
2861 buffer4.read(cx).file().unwrap().path().as_ref(),
2862 Path::new("d/file4")
2863 );
2864 assert_eq!(
2865 buffer5.read(cx).file().unwrap().path().as_ref(),
2866 Path::new("b/c/file5")
2867 );
2868
2869 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2870 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2871 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2872 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2873 });
2874
2875 // Update the remote worktree. Check that it becomes consistent with the
2876 // local worktree.
2877 cx.executor().run_until_parked();
2878
2879 remote.update(cx, |remote, _| {
2880 for update in updates.lock().drain(..) {
2881 remote.as_remote_mut().unwrap().update_from_remote(update);
2882 }
2883 });
2884 cx.executor().run_until_parked();
2885 remote.update(cx, |remote, _| {
2886 assert_eq!(
2887 remote
2888 .paths()
2889 .map(|p| p.to_str().unwrap())
2890 .collect::<Vec<_>>(),
2891 expected_paths
2892 );
2893 });
2894}
2895
2896#[gpui::test(iterations = 10)]
2897async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2898 init_test(cx);
2899
2900 let fs = FakeFs::new(cx.executor());
2901 fs.insert_tree(
2902 "/dir",
2903 json!({
2904 "a": {
2905 "file1": "",
2906 }
2907 }),
2908 )
2909 .await;
2910
2911 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2912 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2913 let tree_id = tree.update(cx, |tree, _| tree.id());
2914
2915 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2916 project.update(cx, |project, cx| {
2917 let tree = project.worktrees().next().unwrap();
2918 tree.read(cx)
2919 .entry_for_path(path)
2920 .unwrap_or_else(|| panic!("no entry for path {}", path))
2921 .id
2922 })
2923 };
2924
2925 let dir_id = id_for_path("a", cx);
2926 let file_id = id_for_path("a/file1", cx);
2927 let buffer = project
2928 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2929 .await
2930 .unwrap();
2931 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2932
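    // Rename the directory containing the buffer's file.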
2933 project
2934 .update(cx, |project, cx| {
2935 project.rename_entry(dir_id, Path::new("b"), cx)
2936 })
2937 .unwrap()
2938 .await
2939 .unwrap();
2940 cx.executor().run_until_parked();
2941
2942 assert_eq!(id_for_path("b", cx), dir_id);
2943 assert_eq!(id_for_path("b/file1", cx), file_id);
2944 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2945}
2946
2947#[gpui::test]
2948async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2949 init_test(cx);
2950
2951 let fs = FakeFs::new(cx.executor());
2952 fs.insert_tree(
2953 "/dir",
2954 json!({
2955 "a.txt": "a-contents",
2956 "b.txt": "b-contents",
2957 }),
2958 )
2959 .await;
2960
2961 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2962
2963 // Spawn multiple tasks to open paths, repeating some paths.
2964 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2965 (
2966 p.open_local_buffer("/dir/a.txt", cx),
2967 p.open_local_buffer("/dir/b.txt", cx),
2968 p.open_local_buffer("/dir/a.txt", cx),
2969 )
2970 });
2971
2972 let buffer_a_1 = buffer_a_1.await.unwrap();
2973 let buffer_a_2 = buffer_a_2.await.unwrap();
2974 let buffer_b = buffer_b.await.unwrap();
2975 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
2976 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
2977
2978 // There is only one buffer per path.
2979 let buffer_a_id = buffer_a_1.entity_id();
2980 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
2981
2982 // Open the same path again while it is still open.
2983 drop(buffer_a_1);
2984 let buffer_a_3 = project
2985 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2986 .await
2987 .unwrap();
2988
2989 // There's still only one buffer per path.
2990 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
2991}
2992
2993#[gpui::test]
2994async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2995 init_test(cx);
2996
2997 let fs = FakeFs::new(cx.executor());
2998 fs.insert_tree(
2999 "/dir",
3000 json!({
3001 "file1": "abc",
3002 "file2": "def",
3003 "file3": "ghi",
3004 }),
3005 )
3006 .await;
3007
3008 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3009
3010 let buffer1 = project
3011 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3012 .await
3013 .unwrap();
3014 let events = Arc::new(Mutex::new(Vec::new()));
3015
    // Initially, the buffer isn't dirty.
3017 buffer1.update(cx, |buffer, cx| {
3018 cx.subscribe(&buffer1, {
3019 let events = events.clone();
3020 move |_, _, event, _| match event {
3021 BufferEvent::Operation(_) => {}
3022 _ => events.lock().push(event.clone()),
3023 }
3024 })
3025 .detach();
3026
3027 assert!(!buffer.is_dirty());
3028 assert!(events.lock().is_empty());
3029
3030 buffer.edit([(1..2, "")], None, cx);
3031 });
3032
    // After the first edit, the buffer is dirty and emits a `DirtyChanged` event.
3034 buffer1.update(cx, |buffer, cx| {
3035 assert!(buffer.text() == "ac");
3036 assert!(buffer.is_dirty());
3037 assert_eq!(
3038 *events.lock(),
3039 &[language::Event::Edited, language::Event::DirtyChanged]
3040 );
3041 events.lock().clear();
3042 buffer.did_save(
3043 buffer.version(),
3044 buffer.as_rope().fingerprint(),
3045 buffer.file().unwrap().mtime(),
3046 cx,
3047 );
3048 });
3049
    // After saving, the buffer is no longer dirty and emits a `Saved` event.
3051 buffer1.update(cx, |buffer, cx| {
3052 assert!(!buffer.is_dirty());
3053 assert_eq!(*events.lock(), &[language::Event::Saved]);
3054 events.lock().clear();
3055
3056 buffer.edit([(1..1, "B")], None, cx);
3057 buffer.edit([(2..2, "D")], None, cx);
3058 });
3059
    // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
3061 buffer1.update(cx, |buffer, cx| {
3062 assert!(buffer.text() == "aBDc");
3063 assert!(buffer.is_dirty());
3064 assert_eq!(
3065 *events.lock(),
3066 &[
3067 language::Event::Edited,
3068 language::Event::DirtyChanged,
3069 language::Event::Edited,
3070 ],
3071 );
3072 events.lock().clear();
3073
3074 // After restoring the buffer to its previously-saved state,
3075 // the buffer is not considered dirty anymore.
3076 buffer.edit([(1..3, "")], None, cx);
3077 assert!(buffer.text() == "ac");
3078 assert!(!buffer.is_dirty());
3079 });
3080
3081 assert_eq!(
3082 *events.lock(),
3083 &[language::Event::Edited, language::Event::DirtyChanged]
3084 );
3085
3086 // When a file is deleted, the buffer is considered dirty.
3087 let events = Arc::new(Mutex::new(Vec::new()));
3088 let buffer2 = project
3089 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3090 .await
3091 .unwrap();
3092 buffer2.update(cx, |_, cx| {
3093 cx.subscribe(&buffer2, {
3094 let events = events.clone();
3095 move |_, _, event, _| events.lock().push(event.clone())
3096 })
3097 .detach();
3098 });
3099
3100 fs.remove_file("/dir/file2".as_ref(), Default::default())
3101 .await
3102 .unwrap();
3103 cx.executor().run_until_parked();
3104 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3105 assert_eq!(
3106 *events.lock(),
3107 &[
3108 language::Event::DirtyChanged,
3109 language::Event::FileHandleChanged
3110 ]
3111 );
3112
    // When a file that is already dirty is deleted, we don't emit an extra `DirtyChanged` event.
3114 let events = Arc::new(Mutex::new(Vec::new()));
3115 let buffer3 = project
3116 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3117 .await
3118 .unwrap();
3119 buffer3.update(cx, |_, cx| {
3120 cx.subscribe(&buffer3, {
3121 let events = events.clone();
3122 move |_, _, event, _| events.lock().push(event.clone())
3123 })
3124 .detach();
3125 });
3126
3127 buffer3.update(cx, |buffer, cx| {
3128 buffer.edit([(0..0, "x")], None, cx);
3129 });
3130 events.lock().clear();
3131 fs.remove_file("/dir/file3".as_ref(), Default::default())
3132 .await
3133 .unwrap();
3134 cx.executor().run_until_parked();
3135 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3136 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3137}
3138
3139#[gpui::test]
3140async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3141 init_test(cx);
3142
3143 let initial_contents = "aaa\nbbbbb\nc\n";
3144 let fs = FakeFs::new(cx.executor());
3145 fs.insert_tree(
3146 "/dir",
3147 json!({
3148 "the-file": initial_contents,
3149 }),
3150 )
3151 .await;
3152 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3153 let buffer = project
3154 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3155 .await
3156 .unwrap();
3157
3158 let anchors = (0..3)
3159 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3160 .collect::<Vec<_>>();
3161
3162 // Change the file on disk, adding two new lines of text, and removing
3163 // one line.
3164 buffer.update(cx, |buffer, _| {
3165 assert!(!buffer.is_dirty());
3166 assert!(!buffer.has_conflict());
3167 });
3168 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3169 fs.save(
3170 "/dir/the-file".as_ref(),
3171 &new_contents.into(),
3172 LineEnding::Unix,
3173 )
3174 .await
3175 .unwrap();
3176
3177 // Because the buffer was not modified, it is reloaded from disk. Its
3178 // contents are edited according to the diff between the old and new
3179 // file contents.
3180 cx.executor().run_until_parked();
3181 buffer.update(cx, |buffer, _| {
3182 assert_eq!(buffer.text(), new_contents);
3183 assert!(!buffer.is_dirty());
3184 assert!(!buffer.has_conflict());
3185
3186 let anchor_positions = anchors
3187 .iter()
3188 .map(|anchor| anchor.to_point(&*buffer))
3189 .collect::<Vec<_>>();
3190 assert_eq!(
3191 anchor_positions,
3192 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3193 );
3194 });
3195
    // Modify the buffer, making it dirty but not conflicted.
3197 buffer.update(cx, |buffer, cx| {
3198 buffer.edit([(0..0, " ")], None, cx);
3199 assert!(buffer.is_dirty());
3200 assert!(!buffer.has_conflict());
3201 });
3202
3203 // Change the file on disk again, adding blank lines to the beginning.
3204 fs.save(
3205 "/dir/the-file".as_ref(),
3206 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3207 LineEnding::Unix,
3208 )
3209 .await
3210 .unwrap();
3211
3212 // Because the buffer is modified, it doesn't reload from disk, but is
3213 // marked as having a conflict.
3214 cx.executor().run_until_parked();
3215 buffer.update(cx, |buffer, _| {
3216 assert!(buffer.has_conflict());
3217 });
3218}
3219
3220#[gpui::test]
3221async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3222 init_test(cx);
3223
3224 let fs = FakeFs::new(cx.executor());
3225 fs.insert_tree(
3226 "/dir",
3227 json!({
3228 "file1": "a\nb\nc\n",
3229 "file2": "one\r\ntwo\r\nthree\r\n",
3230 }),
3231 )
3232 .await;
3233
3234 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3235 let buffer1 = project
3236 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3237 .await
3238 .unwrap();
3239 let buffer2 = project
3240 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3241 .await
3242 .unwrap();
3243
3244 buffer1.update(cx, |buffer, _| {
3245 assert_eq!(buffer.text(), "a\nb\nc\n");
3246 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3247 });
3248 buffer2.update(cx, |buffer, _| {
3249 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3250 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3251 });
3252
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3255 fs.save(
3256 "/dir/file1".as_ref(),
3257 &"aaa\nb\nc\n".into(),
3258 LineEnding::Windows,
3259 )
3260 .await
3261 .unwrap();
3262 cx.executor().run_until_parked();
3263 buffer1.update(cx, |buffer, _| {
3264 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3265 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3266 });
3267
    // Save a file with Windows line endings. The file is written correctly.
3269 buffer2.update(cx, |buffer, cx| {
3270 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3271 });
3272 project
3273 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3274 .await
3275 .unwrap();
3276 assert_eq!(
3277 fs.load("/dir/file2".as_ref()).await.unwrap(),
3278 "one\r\ntwo\r\nthree\r\nfour\r\n",
3279 );
3280}
3281
3282#[gpui::test]
3283async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3284 init_test(cx);
3285
3286 let fs = FakeFs::new(cx.executor());
3287 fs.insert_tree(
3288 "/the-dir",
3289 json!({
3290 "a.rs": "
3291 fn foo(mut v: Vec<usize>) {
3292 for x in &v {
3293 v.push(1);
3294 }
3295 }
3296 "
3297 .unindent(),
3298 }),
3299 )
3300 .await;
3301
3302 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3303 let buffer = project
3304 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3305 .await
3306 .unwrap();
3307
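    // Publish diagnostics whose related information cross-references the hints and their
    // primary diagnostics, so that they are grouped together.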
3308 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3309 let message = lsp::PublishDiagnosticsParams {
3310 uri: buffer_uri.clone(),
3311 diagnostics: vec![
3312 lsp::Diagnostic {
3313 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3314 severity: Some(DiagnosticSeverity::WARNING),
3315 message: "error 1".to_string(),
3316 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3317 location: lsp::Location {
3318 uri: buffer_uri.clone(),
3319 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3320 },
3321 message: "error 1 hint 1".to_string(),
3322 }]),
3323 ..Default::default()
3324 },
3325 lsp::Diagnostic {
3326 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3327 severity: Some(DiagnosticSeverity::HINT),
3328 message: "error 1 hint 1".to_string(),
3329 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3330 location: lsp::Location {
3331 uri: buffer_uri.clone(),
3332 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3333 },
3334 message: "original diagnostic".to_string(),
3335 }]),
3336 ..Default::default()
3337 },
3338 lsp::Diagnostic {
3339 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3340 severity: Some(DiagnosticSeverity::ERROR),
3341 message: "error 2".to_string(),
3342 related_information: Some(vec![
3343 lsp::DiagnosticRelatedInformation {
3344 location: lsp::Location {
3345 uri: buffer_uri.clone(),
3346 range: lsp::Range::new(
3347 lsp::Position::new(1, 13),
3348 lsp::Position::new(1, 15),
3349 ),
3350 },
3351 message: "error 2 hint 1".to_string(),
3352 },
3353 lsp::DiagnosticRelatedInformation {
3354 location: lsp::Location {
3355 uri: buffer_uri.clone(),
3356 range: lsp::Range::new(
3357 lsp::Position::new(1, 13),
3358 lsp::Position::new(1, 15),
3359 ),
3360 },
3361 message: "error 2 hint 2".to_string(),
3362 },
3363 ]),
3364 ..Default::default()
3365 },
3366 lsp::Diagnostic {
3367 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3368 severity: Some(DiagnosticSeverity::HINT),
3369 message: "error 2 hint 1".to_string(),
3370 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3371 location: lsp::Location {
3372 uri: buffer_uri.clone(),
3373 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3374 },
3375 message: "original diagnostic".to_string(),
3376 }]),
3377 ..Default::default()
3378 },
3379 lsp::Diagnostic {
3380 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3381 severity: Some(DiagnosticSeverity::HINT),
3382 message: "error 2 hint 2".to_string(),
3383 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3384 location: lsp::Location {
3385 uri: buffer_uri,
3386 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3387 },
3388 message: "original diagnostic".to_string(),
3389 }]),
3390 ..Default::default()
3391 },
3392 ],
3393 version: None,
3394 };
3395
3396 project
3397 .update(cx, |p, cx| {
3398 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3399 })
3400 .unwrap();
3401 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3402
3403 assert_eq!(
3404 buffer
3405 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3406 .collect::<Vec<_>>(),
3407 &[
3408 DiagnosticEntry {
3409 range: Point::new(1, 8)..Point::new(1, 9),
3410 diagnostic: Diagnostic {
3411 severity: DiagnosticSeverity::WARNING,
3412 message: "error 1".to_string(),
3413 group_id: 1,
3414 is_primary: true,
3415 ..Default::default()
3416 }
3417 },
3418 DiagnosticEntry {
3419 range: Point::new(1, 8)..Point::new(1, 9),
3420 diagnostic: Diagnostic {
3421 severity: DiagnosticSeverity::HINT,
3422 message: "error 1 hint 1".to_string(),
3423 group_id: 1,
3424 is_primary: false,
3425 ..Default::default()
3426 }
3427 },
3428 DiagnosticEntry {
3429 range: Point::new(1, 13)..Point::new(1, 15),
3430 diagnostic: Diagnostic {
3431 severity: DiagnosticSeverity::HINT,
3432 message: "error 2 hint 1".to_string(),
3433 group_id: 0,
3434 is_primary: false,
3435 ..Default::default()
3436 }
3437 },
3438 DiagnosticEntry {
3439 range: Point::new(1, 13)..Point::new(1, 15),
3440 diagnostic: Diagnostic {
3441 severity: DiagnosticSeverity::HINT,
3442 message: "error 2 hint 2".to_string(),
3443 group_id: 0,
3444 is_primary: false,
3445 ..Default::default()
3446 }
3447 },
3448 DiagnosticEntry {
3449 range: Point::new(2, 8)..Point::new(2, 17),
3450 diagnostic: Diagnostic {
3451 severity: DiagnosticSeverity::ERROR,
3452 message: "error 2".to_string(),
3453 group_id: 0,
3454 is_primary: true,
3455 ..Default::default()
3456 }
3457 }
3458 ]
3459 );
3460
3461 assert_eq!(
3462 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3463 &[
3464 DiagnosticEntry {
3465 range: Point::new(1, 13)..Point::new(1, 15),
3466 diagnostic: Diagnostic {
3467 severity: DiagnosticSeverity::HINT,
3468 message: "error 2 hint 1".to_string(),
3469 group_id: 0,
3470 is_primary: false,
3471 ..Default::default()
3472 }
3473 },
3474 DiagnosticEntry {
3475 range: Point::new(1, 13)..Point::new(1, 15),
3476 diagnostic: Diagnostic {
3477 severity: DiagnosticSeverity::HINT,
3478 message: "error 2 hint 2".to_string(),
3479 group_id: 0,
3480 is_primary: false,
3481 ..Default::default()
3482 }
3483 },
3484 DiagnosticEntry {
3485 range: Point::new(2, 8)..Point::new(2, 17),
3486 diagnostic: Diagnostic {
3487 severity: DiagnosticSeverity::ERROR,
3488 message: "error 2".to_string(),
3489 group_id: 0,
3490 is_primary: true,
3491 ..Default::default()
3492 }
3493 }
3494 ]
3495 );
3496
3497 assert_eq!(
3498 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3499 &[
3500 DiagnosticEntry {
3501 range: Point::new(1, 8)..Point::new(1, 9),
3502 diagnostic: Diagnostic {
3503 severity: DiagnosticSeverity::WARNING,
3504 message: "error 1".to_string(),
3505 group_id: 1,
3506 is_primary: true,
3507 ..Default::default()
3508 }
3509 },
3510 DiagnosticEntry {
3511 range: Point::new(1, 8)..Point::new(1, 9),
3512 diagnostic: Diagnostic {
3513 severity: DiagnosticSeverity::HINT,
3514 message: "error 1 hint 1".to_string(),
3515 group_id: 1,
3516 is_primary: false,
3517 ..Default::default()
3518 }
3519 },
3520 ]
3521 );
3522}
3523
3524#[gpui::test]
3525async fn test_rename(cx: &mut gpui::TestAppContext) {
3526 init_test(cx);
3527
3528 let mut language = Language::new(
3529 LanguageConfig {
3530 name: "Rust".into(),
3531 path_suffixes: vec!["rs".to_string()],
3532 ..Default::default()
3533 },
3534 Some(tree_sitter_rust::language()),
3535 );
3536 let mut fake_servers = language
3537 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3538 capabilities: lsp::ServerCapabilities {
3539 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3540 prepare_provider: Some(true),
3541 work_done_progress_options: Default::default(),
3542 })),
3543 ..Default::default()
3544 },
3545 ..Default::default()
3546 }))
3547 .await;
3548
3549 let fs = FakeFs::new(cx.executor());
3550 fs.insert_tree(
3551 "/dir",
3552 json!({
3553 "one.rs": "const ONE: usize = 1;",
3554 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3555 }),
3556 )
3557 .await;
3558
3559 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3560 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3561 let buffer = project
3562 .update(cx, |project, cx| {
3563 project.open_local_buffer("/dir/one.rs", cx)
3564 })
3565 .await
3566 .unwrap();
3567
3568 let fake_server = fake_servers.next().await.unwrap();
3569
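    // Prepare a rename at offset 7, which is inside the `ONE` constant's name.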
3570 let response = project.update(cx, |project, cx| {
3571 project.prepare_rename(buffer.clone(), 7, cx)
3572 });
3573 fake_server
3574 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3575 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3576 assert_eq!(params.position, lsp::Position::new(0, 7));
3577 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3578 lsp::Position::new(0, 6),
3579 lsp::Position::new(0, 9),
3580 ))))
3581 })
3582 .next()
3583 .await
3584 .unwrap();
3585 let range = response.await.unwrap().unwrap();
3586 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3587 assert_eq!(range, 6..9);
3588
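    // Perform the rename. The server's workspace edit spans both one.rs and two.rs.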
3589 let response = project.update(cx, |project, cx| {
3590 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3591 });
3592 fake_server
3593 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3594 assert_eq!(
3595 params.text_document_position.text_document.uri.as_str(),
3596 "file:///dir/one.rs"
3597 );
3598 assert_eq!(
3599 params.text_document_position.position,
3600 lsp::Position::new(0, 7)
3601 );
3602 assert_eq!(params.new_name, "THREE");
3603 Ok(Some(lsp::WorkspaceEdit {
3604 changes: Some(
3605 [
3606 (
3607 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3608 vec![lsp::TextEdit::new(
3609 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3610 "THREE".to_string(),
3611 )],
3612 ),
3613 (
3614 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3615 vec![
3616 lsp::TextEdit::new(
3617 lsp::Range::new(
3618 lsp::Position::new(0, 24),
3619 lsp::Position::new(0, 27),
3620 ),
3621 "THREE".to_string(),
3622 ),
3623 lsp::TextEdit::new(
3624 lsp::Range::new(
3625 lsp::Position::new(0, 35),
3626 lsp::Position::new(0, 38),
3627 ),
3628 "THREE".to_string(),
3629 ),
3630 ],
3631 ),
3632 ]
3633 .into_iter()
3634 .collect(),
3635 ),
3636 ..Default::default()
3637 }))
3638 })
3639 .next()
3640 .await
3641 .unwrap();
3642 let mut transaction = response.await.unwrap().0;
3643 assert_eq!(transaction.len(), 2);
3644 assert_eq!(
3645 transaction
3646 .remove_entry(&buffer)
3647 .unwrap()
3648 .0
3649 .update(cx, |buffer, _| buffer.text()),
3650 "const THREE: usize = 1;"
3651 );
3652 assert_eq!(
3653 transaction
3654 .into_keys()
3655 .next()
3656 .unwrap()
3657 .update(cx, |buffer, _| buffer.text()),
3658 "const TWO: usize = one::THREE + one::THREE;"
3659 );
3660}
3661
3662#[gpui::test]
3663async fn test_search(cx: &mut gpui::TestAppContext) {
3664 init_test(cx);
3665
3666 let fs = FakeFs::new(cx.executor());
3667 fs.insert_tree(
3668 "/dir",
3669 json!({
3670 "one.rs": "const ONE: usize = 1;",
3671 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3672 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3673 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3674 }),
3675 )
3676 .await;
3677 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3678 assert_eq!(
3679 search(
3680 &project,
3681 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3682 cx
3683 )
3684 .await
3685 .unwrap(),
3686 HashMap::from_iter([
3687 ("two.rs".to_string(), vec![6..9]),
3688 ("three.rs".to_string(), vec![37..40])
3689 ])
3690 );
3691
3692 let buffer_4 = project
3693 .update(cx, |project, cx| {
3694 project.open_local_buffer("/dir/four.rs", cx)
3695 })
3696 .await
3697 .unwrap();
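    // Edit an open buffer. Subsequent searches should reflect its unsaved in-memory contents.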
3698 buffer_4.update(cx, |buffer, cx| {
3699 let text = "two::TWO";
3700 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3701 });
3702
3703 assert_eq!(
3704 search(
3705 &project,
3706 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3707 cx
3708 )
3709 .await
3710 .unwrap(),
3711 HashMap::from_iter([
3712 ("two.rs".to_string(), vec![6..9]),
3713 ("three.rs".to_string(), vec![37..40]),
3714 ("four.rs".to_string(), vec![25..28, 36..39])
3715 ])
3716 );
3717}
3718
3719#[gpui::test]
3720async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3721 init_test(cx);
3722
3723 let search_query = "file";
3724
3725 let fs = FakeFs::new(cx.executor());
3726 fs.insert_tree(
3727 "/dir",
3728 json!({
3729 "one.rs": r#"// Rust file one"#,
3730 "one.ts": r#"// TypeScript file one"#,
3731 "two.rs": r#"// Rust file two"#,
3732 "two.ts": r#"// TypeScript file two"#,
3733 }),
3734 )
3735 .await;
3736 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3737
3738 assert!(
3739 search(
3740 &project,
3741 SearchQuery::text(
3742 search_query,
3743 false,
3744 true,
3745 vec![PathMatcher::new("*.odd").unwrap()],
3746 Vec::new()
3747 )
3748 .unwrap(),
3749 cx
3750 )
3751 .await
3752 .unwrap()
3753 .is_empty(),
3754 "If no inclusions match, no files should be returned"
3755 );
3756
3757 assert_eq!(
3758 search(
3759 &project,
3760 SearchQuery::text(
3761 search_query,
3762 false,
3763 true,
3764 vec![PathMatcher::new("*.rs").unwrap()],
3765 Vec::new()
3766 )
3767 .unwrap(),
3768 cx
3769 )
3770 .await
3771 .unwrap(),
3772 HashMap::from_iter([
3773 ("one.rs".to_string(), vec![8..12]),
3774 ("two.rs".to_string(), vec![8..12]),
3775 ]),
3776 "Rust only search should give only Rust files"
3777 );
3778
3779 assert_eq!(
3780 search(
3781 &project,
3782 SearchQuery::text(
3783 search_query,
3784 false,
3785 true,
3786 vec![
3787 PathMatcher::new("*.ts").unwrap(),
3788 PathMatcher::new("*.odd").unwrap(),
3789 ],
3790 Vec::new()
3791 ).unwrap(),
3792 cx
3793 )
3794 .await
3795 .unwrap(),
3796 HashMap::from_iter([
3797 ("one.ts".to_string(), vec![14..18]),
3798 ("two.ts".to_string(), vec![14..18]),
3799 ]),
3800 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3801 );
3802
3803 assert_eq!(
3804 search(
3805 &project,
3806 SearchQuery::text(
3807 search_query,
3808 false,
3809 true,
3810 vec![
3811 PathMatcher::new("*.rs").unwrap(),
3812 PathMatcher::new("*.ts").unwrap(),
3813 PathMatcher::new("*.odd").unwrap(),
3814 ],
3815 Vec::new()
3816 ).unwrap(),
3817 cx
3818 )
3819 .await
3820 .unwrap(),
3821 HashMap::from_iter([
3822 ("one.rs".to_string(), vec![8..12]),
3823 ("one.ts".to_string(), vec![14..18]),
3824 ("two.rs".to_string(), vec![8..12]),
3825 ("two.ts".to_string(), vec![14..18]),
3826 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3828 );
3829}
3830
3831#[gpui::test]
3832async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3833 init_test(cx);
3834
3835 let search_query = "file";
3836
3837 let fs = FakeFs::new(cx.executor());
3838 fs.insert_tree(
3839 "/dir",
3840 json!({
3841 "one.rs": r#"// Rust file one"#,
3842 "one.ts": r#"// TypeScript file one"#,
3843 "two.rs": r#"// Rust file two"#,
3844 "two.ts": r#"// TypeScript file two"#,
3845 }),
3846 )
3847 .await;
3848 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3849
3850 assert_eq!(
3851 search(
3852 &project,
3853 SearchQuery::text(
3854 search_query,
3855 false,
3856 true,
3857 Vec::new(),
3858 vec![PathMatcher::new("*.odd").unwrap()],
3859 )
3860 .unwrap(),
3861 cx
3862 )
3863 .await
3864 .unwrap(),
3865 HashMap::from_iter([
3866 ("one.rs".to_string(), vec![8..12]),
3867 ("one.ts".to_string(), vec![14..18]),
3868 ("two.rs".to_string(), vec![8..12]),
3869 ("two.ts".to_string(), vec![14..18]),
3870 ]),
3871 "If no exclusions match, all files should be returned"
3872 );
3873
3874 assert_eq!(
3875 search(
3876 &project,
3877 SearchQuery::text(
3878 search_query,
3879 false,
3880 true,
3881 Vec::new(),
3882 vec![PathMatcher::new("*.rs").unwrap()],
3883 )
3884 .unwrap(),
3885 cx
3886 )
3887 .await
3888 .unwrap(),
3889 HashMap::from_iter([
3890 ("one.ts".to_string(), vec![14..18]),
3891 ("two.ts".to_string(), vec![14..18]),
3892 ]),
3893 "Rust exclusion search should give only TypeScript files"
3894 );
3895
3896 assert_eq!(
3897 search(
3898 &project,
3899 SearchQuery::text(
3900 search_query,
3901 false,
3902 true,
3903 Vec::new(),
3904 vec![
3905 PathMatcher::new("*.ts").unwrap(),
3906 PathMatcher::new("*.odd").unwrap(),
3907 ],
3908 ).unwrap(),
3909 cx
3910 )
3911 .await
3912 .unwrap(),
3913 HashMap::from_iter([
3914 ("one.rs".to_string(), vec![8..12]),
3915 ("two.rs".to_string(), vec![8..12]),
3916 ]),
3917 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3918 );
3919
3920 assert!(
3921 search(
3922 &project,
3923 SearchQuery::text(
3924 search_query,
3925 false,
3926 true,
3927 Vec::new(),
3928 vec![
3929 PathMatcher::new("*.rs").unwrap(),
3930 PathMatcher::new("*.ts").unwrap(),
3931 PathMatcher::new("*.odd").unwrap(),
3932 ],
3933 ).unwrap(),
3934 cx
3935 )
3936 .await
3937 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3939 );
3940}
3941
3942#[gpui::test]
3943async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3944 init_test(cx);
3945
3946 let search_query = "file";
3947
3948 let fs = FakeFs::new(cx.executor());
3949 fs.insert_tree(
3950 "/dir",
3951 json!({
3952 "one.rs": r#"// Rust file one"#,
3953 "one.ts": r#"// TypeScript file one"#,
3954 "two.rs": r#"// Rust file two"#,
3955 "two.ts": r#"// TypeScript file two"#,
3956 }),
3957 )
3958 .await;
3959 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3960
3961 assert!(
3962 search(
3963 &project,
3964 SearchQuery::text(
3965 search_query,
3966 false,
3967 true,
3968 vec![PathMatcher::new("*.odd").unwrap()],
3969 vec![PathMatcher::new("*.odd").unwrap()],
3970 )
3971 .unwrap(),
3972 cx
3973 )
3974 .await
3975 .unwrap()
3976 .is_empty(),
        "If neither the inclusions nor the exclusions match any files, no files should be returned"
3978 );
3979
3980 assert!(
3981 search(
3982 &project,
3983 SearchQuery::text(
3984 search_query,
3985 false,
3986 true,
3987 vec![PathMatcher::new("*.ts").unwrap()],
3988 vec![PathMatcher::new("*.ts").unwrap()],
3989 ).unwrap(),
3990 cx
3991 )
3992 .await
3993 .unwrap()
3994 .is_empty(),
        "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned."
3996 );
3997
3998 assert!(
3999 search(
4000 &project,
4001 SearchQuery::text(
4002 search_query,
4003 false,
4004 true,
4005 vec![
4006 PathMatcher::new("*.ts").unwrap(),
4007 PathMatcher::new("*.odd").unwrap()
4008 ],
4009 vec![
4010 PathMatcher::new("*.ts").unwrap(),
4011 PathMatcher::new("*.odd").unwrap()
4012 ],
4013 )
4014 .unwrap(),
4015 cx
4016 )
4017 .await
4018 .unwrap()
4019 .is_empty(),
        "Adding non-matching patterns to both the inclusions and the exclusions should not change that."
4021 );
4022
4023 assert_eq!(
4024 search(
4025 &project,
4026 SearchQuery::text(
4027 search_query,
4028 false,
4029 true,
4030 vec![
4031 PathMatcher::new("*.ts").unwrap(),
4032 PathMatcher::new("*.odd").unwrap()
4033 ],
4034 vec![
4035 PathMatcher::new("*.rs").unwrap(),
4036 PathMatcher::new("*.odd").unwrap()
4037 ],
4038 )
4039 .unwrap(),
4040 cx
4041 )
4042 .await
4043 .unwrap(),
4044 HashMap::from_iter([
4045 ("one.ts".to_string(), vec![14..18]),
4046 ("two.ts".to_string(), vec![14..18]),
4047 ]),
4048 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4049 );
4050}
4051
4052#[test]
4053fn test_glob_literal_prefix() {
4054 assert_eq!(glob_literal_prefix("**/*.js"), "");
4055 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4056 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4057 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4058}
4059
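// Runs `query` against the project and returns the matching ranges, keyed by each
// buffer's path.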
4060async fn search(
4061 project: &Model<Project>,
4062 query: SearchQuery,
4063 cx: &mut gpui::TestAppContext,
4064) -> Result<HashMap<String, Vec<Range<usize>>>> {
4065 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4066 let mut result = HashMap::default();
4067 while let Some((buffer, range)) = search_rx.next().await {
4068 result.entry(buffer).or_insert(range);
4069 }
4070 Ok(result
4071 .into_iter()
4072 .map(|(buffer, ranges)| {
4073 buffer.update(cx, |buffer, _| {
4074 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4075 let ranges = ranges
4076 .into_iter()
4077 .map(|range| range.to_offset(buffer))
4078 .collect::<Vec<_>>();
4079 (path, ranges)
4080 })
4081 })
4082 .collect())
4083}
4084
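// Initializes logging, the global settings store, and the language and project settings
// used by the tests in this module.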
4085fn init_test(cx: &mut gpui::TestAppContext) {
4086 if std::env::var("RUST_LOG").is_ok() {
4087 env_logger::init();
4088 }
4089
4090 cx.update(|cx| {
4091 let settings_store = SettingsStore::test(cx);
4092 cx.set_global(settings_store);
4093 language::init(cx);
4094 Project::init_settings(cx);
4095 });
4096}