use crate::{Event, *};
use fs::{FakeFs, RealFs};
use futures::{future, StreamExt};
use gpui::AppContext;
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{mem, ops::Range, os, path::Path, sync::Arc, task::Poll, time::Duration};
use unindent::Unindent as _;
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};

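// With parking allowed, the test executor can block on a message sent from a real OS thread.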
#[gpui::test]
async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
    cx.executor().allow_parking();

    let (tx, mut rx) = futures::channel::mpsc::unbounded();
    let _thread = std::thread::spawn(move || {
        std::fs::metadata("/Users").unwrap();
        std::thread::sleep(Duration::from_millis(1000));
        tx.unbounded_send(1).unwrap();
    });
    rx.next().await.unwrap();
}

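// With parking allowed, the foreground executor can await blocking work offloaded via smol::unblock.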
#[gpui::test]
async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
    cx.executor().allow_parking();

    let io_task = smol::unblock(move || {
        println!("sleeping on thread {:?}", std::thread::current().id());
        std::thread::sleep(Duration::from_millis(10));
        1
    });

    let task = cx.foreground_executor().spawn(async move {
        io_task.await;
    });

    task.await;
}

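// A project opened through a symlinked root sees the linked directory's contents, and a
// symlinked directory inside the worktree resolves to the same inodes as its target.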
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees().next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}

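// Nested .zed/settings.json files override the root settings for files beneath their directory.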
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    cx.executor().run_until_parked();
    cx.update(|cx| {
        let tree = worktree.read(cx);

        let settings_a = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("a/a.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );
        let settings_b = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("b/b.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });
}

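// Covers the language server lifecycle: servers start when matching buffers open, receive
// open/change/save/close notifications only for their language, follow renames (including
// renames that change a file's language), and reopen documents after a restart.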
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

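// File system events are forwarded to a language server according to the glob patterns it
// registers via workspace/didChangeWatchedFiles, including globs inside gitignored directories.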
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations. Three of them match the language server's watched
    // patterns, and two of them do not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}

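// Diagnostics published for single-file worktrees land on the corresponding buffers.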
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

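// Diagnostics in a non-visible worktree are shown in the buffer but excluded from the
// project-wide diagnostic summaries.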
#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.update(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.update(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

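// Progress notifications carrying the disk-based diagnostics token drive the
// DiskBasedDiagnosticsStarted/Finished events, and publishing empty diagnostics twice
// produces only a single update event.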
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

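// Restarting a server while its disk-based diagnostics are still in progress attributes the
// started/finished events to the new server and leaves no server marked as running.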
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(0); 0]
        );
    });
}

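// Diagnostics published by a language server are cleared when that server is restarted.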
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}

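// Publishing diagnostics with an unknown buffer version must not confuse version tracking:
// after a restart, the document is reopened at version 0.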
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}

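// Toggling enable_language_server in the language settings stops and restarts only the
// affected language's server.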
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

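// Diagnostics reported against older buffer versions (and against the on-disk contents, for
// disk-based sources) are translated through subsequent edits onto the current buffer.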
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

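// Diagnostics with zero-width ranges, injected via update_buffer_diagnostics, must still be
// rendered; the expansion rules are described before the assertion below.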
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

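// Diagnostics reported by different language servers for the same path are counted separately
// in the project-wide summary.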
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}

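// edits_from_lsp resolves edits that target an older document version, mapping them through
// buffer edits made since that version.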
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

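// edits_from_lsp collapses a whole-file rewrite into the minimal set of buffer edits.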
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

2055#[gpui::test]
2056async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2057 init_test(cx);
2058
2059 let text = "
2060 use a::b;
2061 use a::c;
2062
2063 fn f() {
2064 b();
2065 c();
2066 }
2067 "
2068 .unindent();
2069
2070 let fs = FakeFs::new(cx.executor());
2071 fs.insert_tree(
2072 "/dir",
2073 json!({
2074 "a.rs": text.clone(),
2075 }),
2076 )
2077 .await;
2078
2079 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2080 let buffer = project
2081 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2082 .await
2083 .unwrap();
2084
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point to invalid locations.
2087 let edits = project
2088 .update(cx, |project, cx| {
2089 project.edits_from_lsp(
2090 &buffer,
2091 [
2092 lsp::TextEdit {
2093 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2094 new_text: "\n\n".into(),
2095 },
2096 lsp::TextEdit {
2097 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2098 new_text: "a::{b, c}".into(),
2099 },
2100 lsp::TextEdit {
2101 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2102 new_text: "".into(),
2103 },
2104 lsp::TextEdit {
2105 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2106 new_text: "
2107 fn f() {
2108 b();
2109 c();
2110 }"
2111 .unindent(),
2112 },
2113 ],
2114 LanguageServerId(0),
2115 None,
2116 cx,
2117 )
2118 })
2119 .await
2120 .unwrap();
2121
2122 buffer.update(cx, |buffer, cx| {
2123 let edits = edits
2124 .into_iter()
2125 .map(|(range, text)| {
2126 (
2127 range.start.to_point(buffer)..range.end.to_point(buffer),
2128 text,
2129 )
2130 })
2131 .collect::<Vec<_>>();
2132
2133 assert_eq!(
2134 edits,
2135 [
2136 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2137 (Point::new(1, 0)..Point::new(2, 0), "".into())
2138 ]
2139 );
2140
2141 for (range, new_text) in edits {
2142 buffer.edit([(range, new_text)], None, cx);
2143 }
2144 assert_eq!(
2145 buffer.text(),
2146 "
2147 use a::{b, c};
2148
2149 fn f() {
2150 b();
2151 c();
2152 }
2153 "
2154 .unindent()
2155 );
2156 });
2157}
2158
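/// Collects the chunks of `buffer` within `range`, merging adjacent chunks
/// that share the same diagnostic severity into a single `(text, severity)` pair.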
2159fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2160 buffer: &Buffer,
2161 range: Range<T>,
2162) -> Vec<(String, Option<DiagnosticSeverity>)> {
2163 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2164 for chunk in buffer.snapshot().chunks(range, true) {
2165 if chunks.last().map_or(false, |prev_chunk| {
2166 prev_chunk.1 == chunk.diagnostic_severity
2167 }) {
2168 chunks.last_mut().unwrap().0.push_str(chunk.text);
2169 } else {
2170 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2171 }
2172 }
2173 chunks
2174}
2175
2176#[gpui::test(iterations = 10)]
2177async fn test_definition(cx: &mut gpui::TestAppContext) {
2178 init_test(cx);
2179
2180 let mut language = Language::new(
2181 LanguageConfig {
2182 name: "Rust".into(),
2183 path_suffixes: vec!["rs".to_string()],
2184 ..Default::default()
2185 },
2186 Some(tree_sitter_rust::language()),
2187 );
2188 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2189
2190 let fs = FakeFs::new(cx.executor());
2191 fs.insert_tree(
2192 "/dir",
2193 json!({
2194 "a.rs": "const fn a() { A }",
2195 "b.rs": "const y: i32 = crate::a()",
2196 }),
2197 )
2198 .await;
2199
2200 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2201 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2202
2203 let buffer = project
2204 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2205 .await
2206 .unwrap();
2207
2208 let fake_server = fake_servers.next().await.unwrap();
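    // Respond to the go-to-definition request with a location in `a.rs`, a
    // file that lies outside of the project's single visible worktree.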
2209 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2210 let params = params.text_document_position_params;
2211 assert_eq!(
2212 params.text_document.uri.to_file_path().unwrap(),
2213 Path::new("/dir/b.rs"),
2214 );
2215 assert_eq!(params.position, lsp::Position::new(0, 22));
2216
2217 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2218 lsp::Location::new(
2219 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2220 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2221 ),
2222 )))
2223 });
2224
2225 let mut definitions = project
2226 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2227 .await
2228 .unwrap();
2229
2230 // Assert no new language server started
2231 cx.executor().run_until_parked();
2232 assert!(fake_servers.try_next().is_err());
2233
2234 assert_eq!(definitions.len(), 1);
2235 let definition = definitions.pop().unwrap();
2236 cx.update(|cx| {
2237 let target_buffer = definition.target.buffer.read(cx);
2238 assert_eq!(
2239 target_buffer
2240 .file()
2241 .unwrap()
2242 .as_local()
2243 .unwrap()
2244 .abs_path(cx),
2245 Path::new("/dir/a.rs"),
2246 );
2247 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2248 assert_eq!(
2249 list_worktrees(&project, cx),
2250 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2251 );
2252
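        // Dropping the definition releases the target buffer, which should
        // remove the invisible worktree that was created for `a.rs`.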
2253 drop(definition);
2254 });
2255 cx.update(|cx| {
2256 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2257 });
2258
2259 fn list_worktrees<'a>(
2260 project: &'a Model<Project>,
2261 cx: &'a AppContext,
2262 ) -> Vec<(&'a Path, bool)> {
2263 project
2264 .read(cx)
2265 .worktrees()
2266 .map(|worktree| {
2267 let worktree = worktree.read(cx);
2268 (
2269 worktree.as_local().unwrap().abs_path().as_ref(),
2270 worktree.is_visible(),
2271 )
2272 })
2273 .collect::<Vec<_>>()
2274 }
2275}
2276
2277#[gpui::test]
2278async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2279 init_test(cx);
2280
2281 let mut language = Language::new(
2282 LanguageConfig {
2283 name: "TypeScript".into(),
2284 path_suffixes: vec!["ts".to_string()],
2285 ..Default::default()
2286 },
2287 Some(tree_sitter_typescript::language_typescript()),
2288 );
2289 let mut fake_language_servers = language
2290 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2291 capabilities: lsp::ServerCapabilities {
2292 completion_provider: Some(lsp::CompletionOptions {
2293 trigger_characters: Some(vec![":".to_string()]),
2294 ..Default::default()
2295 }),
2296 ..Default::default()
2297 },
2298 ..Default::default()
2299 }))
2300 .await;
2301
2302 let fs = FakeFs::new(cx.executor());
2303 fs.insert_tree(
2304 "/dir",
2305 json!({
2306 "a.ts": "",
2307 }),
2308 )
2309 .await;
2310
2311 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2312 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2313 let buffer = project
2314 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2315 .await
2316 .unwrap();
2317
2318 let fake_server = fake_language_servers.next().await.unwrap();
2319
2320 let text = "let a = b.fqn";
2321 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2322 let completions = project.update(cx, |project, cx| {
2323 project.completions(&buffer, text.len(), cx)
2324 });
2325
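    // The server's completion item includes no edit range, so the range to
    // replace should be inferred from the word preceding the cursor.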
2326 fake_server
2327 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2328 Ok(Some(lsp::CompletionResponse::Array(vec![
2329 lsp::CompletionItem {
2330 label: "fullyQualifiedName?".into(),
2331 insert_text: Some("fullyQualifiedName".into()),
2332 ..Default::default()
2333 },
2334 ])))
2335 })
2336 .next()
2337 .await;
2338 let completions = completions.await.unwrap();
2339 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2340 assert_eq!(completions.len(), 1);
2341 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2342 assert_eq!(
2343 completions[0].old_range.to_offset(&snapshot),
2344 text.len() - 3..text.len()
2345 );
2346
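    // Inside a string literal, the inferred range should cover only the
    // partial word before the cursor ("cmp"), not the entire string.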
2347 let text = "let a = \"atoms/cmp\"";
2348 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2349 let completions = project.update(cx, |project, cx| {
2350 project.completions(&buffer, text.len() - 1, cx)
2351 });
2352
2353 fake_server
2354 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2355 Ok(Some(lsp::CompletionResponse::Array(vec![
2356 lsp::CompletionItem {
2357 label: "component".into(),
2358 ..Default::default()
2359 },
2360 ])))
2361 })
2362 .next()
2363 .await;
2364 let completions = completions.await.unwrap();
2365 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2366 assert_eq!(completions.len(), 1);
2367 assert_eq!(completions[0].new_text, "component");
2368 assert_eq!(
2369 completions[0].old_range.to_offset(&snapshot),
2370 text.len() - 4..text.len() - 1
2371 );
2372}
2373
2374#[gpui::test]
2375async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2376 init_test(cx);
2377
2378 let mut language = Language::new(
2379 LanguageConfig {
2380 name: "TypeScript".into(),
2381 path_suffixes: vec!["ts".to_string()],
2382 ..Default::default()
2383 },
2384 Some(tree_sitter_typescript::language_typescript()),
2385 );
2386 let mut fake_language_servers = language
2387 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2388 capabilities: lsp::ServerCapabilities {
2389 completion_provider: Some(lsp::CompletionOptions {
2390 trigger_characters: Some(vec![":".to_string()]),
2391 ..Default::default()
2392 }),
2393 ..Default::default()
2394 },
2395 ..Default::default()
2396 }))
2397 .await;
2398
2399 let fs = FakeFs::new(cx.executor());
2400 fs.insert_tree(
2401 "/dir",
2402 json!({
2403 "a.ts": "",
2404 }),
2405 )
2406 .await;
2407
2408 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2409 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2410 let buffer = project
2411 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2412 .await
2413 .unwrap();
2414
2415 let fake_server = fake_language_servers.next().await.unwrap();
2416
2417 let text = "let a = b.fqn";
2418 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2419 let completions = project.update(cx, |project, cx| {
2420 project.completions(&buffer, text.len(), cx)
2421 });
2422
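    // Carriage returns in the server's insert text should be normalized to
    // plain newlines in the resulting completion.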
2423 fake_server
2424 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2425 Ok(Some(lsp::CompletionResponse::Array(vec![
2426 lsp::CompletionItem {
2427 label: "fullyQualifiedName?".into(),
2428 insert_text: Some("fully\rQualified\r\nName".into()),
2429 ..Default::default()
2430 },
2431 ])))
2432 })
2433 .next()
2434 .await;
2435 let completions = completions.await.unwrap();
2436 assert_eq!(completions.len(), 1);
2437 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2438}
2439
2440#[gpui::test(iterations = 10)]
2441async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2442 init_test(cx);
2443
2444 let mut language = Language::new(
2445 LanguageConfig {
2446 name: "TypeScript".into(),
2447 path_suffixes: vec!["ts".to_string()],
2448 ..Default::default()
2449 },
2450 None,
2451 );
2452 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2453
2454 let fs = FakeFs::new(cx.executor());
2455 fs.insert_tree(
2456 "/dir",
2457 json!({
2458 "a.ts": "a",
2459 }),
2460 )
2461 .await;
2462
2463 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2464 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2465 let buffer = project
2466 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2467 .await
2468 .unwrap();
2469
2470 let fake_server = fake_language_servers.next().await.unwrap();
2471
    // The language server returns code actions that contain commands, but no edits.
2473 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2474 fake_server
2475 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2476 Ok(Some(vec![
2477 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2478 title: "The code action".into(),
2479 command: Some(lsp::Command {
2480 title: "The command".into(),
2481 command: "_the/command".into(),
2482 arguments: Some(vec![json!("the-argument")]),
2483 }),
2484 ..Default::default()
2485 }),
2486 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2487 title: "two".into(),
2488 ..Default::default()
2489 }),
2490 ]))
2491 })
2492 .next()
2493 .await;
2494
2495 let action = actions.await.unwrap()[0].clone();
2496 let apply = project.update(cx, |project, cx| {
2497 project.apply_code_action(buffer.clone(), action, true, cx)
2498 });
2499
    // Resolving the code action does not populate its edits. In the absence
    // of edits, we must execute the given command.
2502 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2503 |action, _| async move { Ok(action) },
2504 );
2505
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2508 fake_server
2509 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2510 let fake = fake_server.clone();
2511 move |params, _| {
2512 assert_eq!(params.command, "_the/command");
2513 let fake = fake.clone();
2514 async move {
2515 fake.server
2516 .request::<lsp::request::ApplyWorkspaceEdit>(
2517 lsp::ApplyWorkspaceEditParams {
2518 label: None,
2519 edit: lsp::WorkspaceEdit {
2520 changes: Some(
2521 [(
2522 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2523 vec![lsp::TextEdit {
2524 range: lsp::Range::new(
2525 lsp::Position::new(0, 0),
2526 lsp::Position::new(0, 0),
2527 ),
2528 new_text: "X".into(),
2529 }],
2530 )]
2531 .into_iter()
2532 .collect(),
2533 ),
2534 ..Default::default()
2535 },
2536 },
2537 )
2538 .await
2539 .unwrap();
2540 Ok(Some(json!(null)))
2541 }
2542 }
2543 })
2544 .next()
2545 .await;
2546
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2549 let transaction = apply.await.unwrap();
2550 assert!(transaction.0.contains_key(&buffer));
2551 buffer.update(cx, |buffer, cx| {
2552 assert_eq!(buffer.text(), "Xa");
2553 buffer.undo(cx);
2554 assert_eq!(buffer.text(), "a");
2555 });
2556}
2557
2558#[gpui::test(iterations = 10)]
2559async fn test_save_file(cx: &mut gpui::TestAppContext) {
2560 init_test(cx);
2561
2562 let fs = FakeFs::new(cx.executor());
2563 fs.insert_tree(
2564 "/dir",
2565 json!({
2566 "file1": "the old contents",
2567 }),
2568 )
2569 .await;
2570
2571 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2572 let buffer = project
2573 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2574 .await
2575 .unwrap();
2576 buffer.update(cx, |buffer, cx| {
2577 assert_eq!(buffer.text(), "the old contents");
2578 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2579 });
2580
2581 project
2582 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2583 .await
2584 .unwrap();
2585
2586 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2587 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2588}
2589
2590#[gpui::test]
2591async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2592 init_test(cx);
2593
2594 let fs = FakeFs::new(cx.executor());
2595 fs.insert_tree(
2596 "/dir",
2597 json!({
2598 "file1": "the old contents",
2599 }),
2600 )
2601 .await;
2602
2603 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2604 let buffer = project
2605 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2606 .await
2607 .unwrap();
2608 buffer.update(cx, |buffer, cx| {
2609 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2610 });
2611
2612 project
2613 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2614 .await
2615 .unwrap();
2616
2617 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2618 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2619}
2620
2621#[gpui::test]
2622async fn test_save_as(cx: &mut gpui::TestAppContext) {
2623 init_test(cx);
2624
2625 let fs = FakeFs::new(cx.executor());
2626 fs.insert_tree("/dir", json!({})).await;
2627
2628 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2629
2630 let languages = project.update(cx, |project, _| project.languages().clone());
2631 languages.register(
2632 "/some/path",
2633 LanguageConfig {
2634 name: "Rust".into(),
2635 path_suffixes: vec!["rs".into()],
2636 ..Default::default()
2637 },
2638 tree_sitter_rust::language(),
2639 vec![],
2640 |_| Default::default(),
2641 );
2642
2643 let buffer = project.update(cx, |project, cx| {
2644 project.create_buffer("", None, cx).unwrap()
2645 });
2646 buffer.update(cx, |buffer, cx| {
2647 buffer.edit([(0..0, "abc")], None, cx);
2648 assert!(buffer.is_dirty());
2649 assert!(!buffer.has_conflict());
2650 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2651 });
2652 project
2653 .update(cx, |project, cx| {
2654 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2655 })
2656 .await
2657 .unwrap();
2658 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2659
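    // After saving, the buffer is clean and picks up the Rust language based
    // on its new path.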
2660 cx.executor().run_until_parked();
2661 buffer.update(cx, |buffer, cx| {
2662 assert_eq!(
2663 buffer.file().unwrap().full_path(cx),
2664 Path::new("dir/file1.rs")
2665 );
2666 assert!(!buffer.is_dirty());
2667 assert!(!buffer.has_conflict());
2668 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2669 });
2670
2671 let opened_buffer = project
2672 .update(cx, |project, cx| {
2673 project.open_local_buffer("/dir/file1.rs", cx)
2674 })
2675 .await
2676 .unwrap();
2677 assert_eq!(opened_buffer, buffer);
2678}
2679
2680#[gpui::test(retries = 5)]
2681async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2682 init_test(cx);
2683 cx.executor().allow_parking();
2684
2685 let dir = temp_tree(json!({
2686 "a": {
2687 "file1": "",
2688 "file2": "",
2689 "file3": "",
2690 },
2691 "b": {
2692 "c": {
2693 "file4": "",
2694 "file5": "",
2695 }
2696 }
2697 }));
2698
2699 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2700 let rpc = project.update(cx, |p, _| p.client.clone());
2701
2702 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2703 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2704 async move { buffer.await.unwrap() }
2705 };
2706 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2707 project.update(cx, |project, cx| {
2708 let tree = project.worktrees().next().unwrap();
2709 tree.read(cx)
2710 .entry_for_path(path)
2711 .unwrap_or_else(|| panic!("no entry for path {}", path))
2712 .id
2713 })
2714 };
2715
2716 let buffer2 = buffer_for_path("a/file2", cx).await;
2717 let buffer3 = buffer_for_path("a/file3", cx).await;
2718 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2719 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2720
2721 let file2_id = id_for_path("a/file2", cx);
2722 let file3_id = id_for_path("a/file3", cx);
2723 let file4_id = id_for_path("b/c/file4", cx);
2724
2725 // Create a remote copy of this worktree.
2726 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2727
2728 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2729
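    // Record the update messages produced by the local worktree so they can
    // be replayed on the remote copy below.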
2730 let updates = Arc::new(Mutex::new(Vec::new()));
2731 tree.update(cx, |tree, cx| {
2732 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2733 let updates = updates.clone();
2734 move |update| {
2735 updates.lock().push(update);
2736 async { true }
2737 }
2738 });
2739 });
2740
2741 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2742
2743 cx.executor().run_until_parked();
2744
2745 cx.update(|cx| {
2746 assert!(!buffer2.read(cx).is_dirty());
2747 assert!(!buffer3.read(cx).is_dirty());
2748 assert!(!buffer4.read(cx).is_dirty());
2749 assert!(!buffer5.read(cx).is_dirty());
2750 });
2751
2752 // Rename and delete files and directories.
2753 tree.flush_fs_events(cx).await;
2754 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2755 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2756 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2757 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2758 tree.flush_fs_events(cx).await;
2759
2760 let expected_paths = vec![
2761 "a",
2762 "a/file1",
2763 "a/file2.new",
2764 "b",
2765 "d",
2766 "d/file3",
2767 "d/file4",
2768 ];
2769
2770 cx.update(|app| {
2771 assert_eq!(
2772 tree.read(app)
2773 .paths()
2774 .map(|p| p.to_str().unwrap())
2775 .collect::<Vec<_>>(),
2776 expected_paths
2777 );
2778 });
2779
2780 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2781 assert_eq!(id_for_path("d/file3", cx), file3_id);
2782 assert_eq!(id_for_path("d/file4", cx), file4_id);
2783
2784 cx.update(|cx| {
2785 assert_eq!(
2786 buffer2.read(cx).file().unwrap().path().as_ref(),
2787 Path::new("a/file2.new")
2788 );
2789 assert_eq!(
2790 buffer3.read(cx).file().unwrap().path().as_ref(),
2791 Path::new("d/file3")
2792 );
2793 assert_eq!(
2794 buffer4.read(cx).file().unwrap().path().as_ref(),
2795 Path::new("d/file4")
2796 );
2797 assert_eq!(
2798 buffer5.read(cx).file().unwrap().path().as_ref(),
2799 Path::new("b/c/file5")
2800 );
2801
2802 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2803 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2804 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2805 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2806 });
2807
2808 // Update the remote worktree. Check that it becomes consistent with the
2809 // local worktree.
2810 cx.executor().run_until_parked();
2811
2812 remote.update(cx, |remote, _| {
2813 for update in updates.lock().drain(..) {
2814 remote.as_remote_mut().unwrap().update_from_remote(update);
2815 }
2816 });
2817 cx.executor().run_until_parked();
2818 remote.update(cx, |remote, _| {
2819 assert_eq!(
2820 remote
2821 .paths()
2822 .map(|p| p.to_str().unwrap())
2823 .collect::<Vec<_>>(),
2824 expected_paths
2825 );
2826 });
2827}
2828
2829#[gpui::test(iterations = 10)]
2830async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2831 init_test(cx);
2832
2833 let fs = FakeFs::new(cx.executor());
2834 fs.insert_tree(
2835 "/dir",
2836 json!({
2837 "a": {
2838 "file1": "",
2839 }
2840 }),
2841 )
2842 .await;
2843
2844 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2845 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2846 let tree_id = tree.update(cx, |tree, _| tree.id());
2847
2848 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2849 project.update(cx, |project, cx| {
2850 let tree = project.worktrees().next().unwrap();
2851 tree.read(cx)
2852 .entry_for_path(path)
2853 .unwrap_or_else(|| panic!("no entry for path {}", path))
2854 .id
2855 })
2856 };
2857
2858 let dir_id = id_for_path("a", cx);
2859 let file_id = id_for_path("a/file1", cx);
2860 let buffer = project
2861 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2862 .await
2863 .unwrap();
2864 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2865
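    // Rename the directory containing the open buffer's file.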
2866 project
2867 .update(cx, |project, cx| {
2868 project.rename_entry(dir_id, Path::new("b"), cx)
2869 })
2870 .unwrap()
2871 .await
2872 .unwrap();
2873 cx.executor().run_until_parked();
2874
2875 assert_eq!(id_for_path("b", cx), dir_id);
2876 assert_eq!(id_for_path("b/file1", cx), file_id);
2877 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2878}
2879
2880#[gpui::test]
2881async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2882 init_test(cx);
2883
2884 let fs = FakeFs::new(cx.executor());
2885 fs.insert_tree(
2886 "/dir",
2887 json!({
2888 "a.txt": "a-contents",
2889 "b.txt": "b-contents",
2890 }),
2891 )
2892 .await;
2893
2894 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2895
2896 // Spawn multiple tasks to open paths, repeating some paths.
2897 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2898 (
2899 p.open_local_buffer("/dir/a.txt", cx),
2900 p.open_local_buffer("/dir/b.txt", cx),
2901 p.open_local_buffer("/dir/a.txt", cx),
2902 )
2903 });
2904
2905 let buffer_a_1 = buffer_a_1.await.unwrap();
2906 let buffer_a_2 = buffer_a_2.await.unwrap();
2907 let buffer_b = buffer_b.await.unwrap();
2908 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
2909 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
2910
2911 // There is only one buffer per path.
2912 let buffer_a_id = buffer_a_1.entity_id();
2913 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
2914
2915 // Open the same path again while it is still open.
2916 drop(buffer_a_1);
2917 let buffer_a_3 = project
2918 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2919 .await
2920 .unwrap();
2921
2922 // There's still only one buffer per path.
2923 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
2924}
2925
2926#[gpui::test]
2927async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2928 init_test(cx);
2929
2930 let fs = FakeFs::new(cx.executor());
2931 fs.insert_tree(
2932 "/dir",
2933 json!({
2934 "file1": "abc",
2935 "file2": "def",
2936 "file3": "ghi",
2937 }),
2938 )
2939 .await;
2940
2941 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2942
2943 let buffer1 = project
2944 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2945 .await
2946 .unwrap();
2947 let events = Arc::new(Mutex::new(Vec::new()));
2948
    // Initially, the buffer isn't dirty.
2950 buffer1.update(cx, |buffer, cx| {
2951 cx.subscribe(&buffer1, {
2952 let events = events.clone();
2953 move |_, _, event, _| match event {
2954 BufferEvent::Operation(_) => {}
2955 _ => events.lock().push(event.clone()),
2956 }
2957 })
2958 .detach();
2959
2960 assert!(!buffer.is_dirty());
2961 assert!(events.lock().is_empty());
2962
2963 buffer.edit([(1..2, "")], None, cx);
2964 });
2965
    // After the first edit, the buffer is dirty and emits a `DirtyChanged` event.
2967 buffer1.update(cx, |buffer, cx| {
2968 assert!(buffer.text() == "ac");
2969 assert!(buffer.is_dirty());
2970 assert_eq!(
2971 *events.lock(),
2972 &[language::Event::Edited, language::Event::DirtyChanged]
2973 );
2974 events.lock().clear();
2975 buffer.did_save(
2976 buffer.version(),
2977 buffer.as_rope().fingerprint(),
2978 buffer.file().unwrap().mtime(),
2979 cx,
2980 );
2981 });
2982
    // After saving, the buffer is no longer dirty and emits a `Saved` event.
2984 buffer1.update(cx, |buffer, cx| {
2985 assert!(!buffer.is_dirty());
2986 assert_eq!(*events.lock(), &[language::Event::Saved]);
2987 events.lock().clear();
2988
2989 buffer.edit([(1..1, "B")], None, cx);
2990 buffer.edit([(2..2, "D")], None, cx);
2991 });
2992
    // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
2994 buffer1.update(cx, |buffer, cx| {
2995 assert!(buffer.text() == "aBDc");
2996 assert!(buffer.is_dirty());
2997 assert_eq!(
2998 *events.lock(),
2999 &[
3000 language::Event::Edited,
3001 language::Event::DirtyChanged,
3002 language::Event::Edited,
3003 ],
3004 );
3005 events.lock().clear();
3006
3007 // After restoring the buffer to its previously-saved state,
3008 // the buffer is not considered dirty anymore.
3009 buffer.edit([(1..3, "")], None, cx);
3010 assert!(buffer.text() == "ac");
3011 assert!(!buffer.is_dirty());
3012 });
3013
3014 assert_eq!(
3015 *events.lock(),
3016 &[language::Event::Edited, language::Event::DirtyChanged]
3017 );
3018
3019 // When a file is deleted, the buffer is considered dirty.
3020 let events = Arc::new(Mutex::new(Vec::new()));
3021 let buffer2 = project
3022 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3023 .await
3024 .unwrap();
3025 buffer2.update(cx, |_, cx| {
3026 cx.subscribe(&buffer2, {
3027 let events = events.clone();
3028 move |_, _, event, _| events.lock().push(event.clone())
3029 })
3030 .detach();
3031 });
3032
3033 fs.remove_file("/dir/file2".as_ref(), Default::default())
3034 .await
3035 .unwrap();
3036 cx.executor().run_until_parked();
3037 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3038 assert_eq!(
3039 *events.lock(),
3040 &[
3041 language::Event::DirtyChanged,
3042 language::Event::FileHandleChanged
3043 ]
3044 );
3045
    // When a file that is already dirty is deleted, no `DirtyChanged` event is emitted.
3047 let events = Arc::new(Mutex::new(Vec::new()));
3048 let buffer3 = project
3049 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3050 .await
3051 .unwrap();
3052 buffer3.update(cx, |_, cx| {
3053 cx.subscribe(&buffer3, {
3054 let events = events.clone();
3055 move |_, _, event, _| events.lock().push(event.clone())
3056 })
3057 .detach();
3058 });
3059
3060 buffer3.update(cx, |buffer, cx| {
3061 buffer.edit([(0..0, "x")], None, cx);
3062 });
3063 events.lock().clear();
3064 fs.remove_file("/dir/file3".as_ref(), Default::default())
3065 .await
3066 .unwrap();
3067 cx.executor().run_until_parked();
3068 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3069 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3070}
3071
3072#[gpui::test]
3073async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3074 init_test(cx);
3075
3076 let initial_contents = "aaa\nbbbbb\nc\n";
3077 let fs = FakeFs::new(cx.executor());
3078 fs.insert_tree(
3079 "/dir",
3080 json!({
3081 "the-file": initial_contents,
3082 }),
3083 )
3084 .await;
3085 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3086 let buffer = project
3087 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3088 .await
3089 .unwrap();
3090
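    // Create some anchors so we can verify how they are relocated when the
    // buffer is reloaded from disk.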
3091 let anchors = (0..3)
3092 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3093 .collect::<Vec<_>>();
3094
3095 // Change the file on disk, adding two new lines of text, and removing
3096 // one line.
3097 buffer.update(cx, |buffer, _| {
3098 assert!(!buffer.is_dirty());
3099 assert!(!buffer.has_conflict());
3100 });
3101 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3102 fs.save(
3103 "/dir/the-file".as_ref(),
3104 &new_contents.into(),
3105 LineEnding::Unix,
3106 )
3107 .await
3108 .unwrap();
3109
3110 // Because the buffer was not modified, it is reloaded from disk. Its
3111 // contents are edited according to the diff between the old and new
3112 // file contents.
3113 cx.executor().run_until_parked();
3114 buffer.update(cx, |buffer, _| {
3115 assert_eq!(buffer.text(), new_contents);
3116 assert!(!buffer.is_dirty());
3117 assert!(!buffer.has_conflict());
3118
3119 let anchor_positions = anchors
3120 .iter()
3121 .map(|anchor| anchor.to_point(&*buffer))
3122 .collect::<Vec<_>>();
3123 assert_eq!(
3124 anchor_positions,
3125 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3126 );
3127 });
3128
3129 // Modify the buffer
3130 buffer.update(cx, |buffer, cx| {
3131 buffer.edit([(0..0, " ")], None, cx);
3132 assert!(buffer.is_dirty());
3133 assert!(!buffer.has_conflict());
3134 });
3135
3136 // Change the file on disk again, adding blank lines to the beginning.
3137 fs.save(
3138 "/dir/the-file".as_ref(),
3139 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3140 LineEnding::Unix,
3141 )
3142 .await
3143 .unwrap();
3144
3145 // Because the buffer is modified, it doesn't reload from disk, but is
3146 // marked as having a conflict.
3147 cx.executor().run_until_parked();
3148 buffer.update(cx, |buffer, _| {
3149 assert!(buffer.has_conflict());
3150 });
3151}
3152
3153#[gpui::test]
3154async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3155 init_test(cx);
3156
3157 let fs = FakeFs::new(cx.executor());
3158 fs.insert_tree(
3159 "/dir",
3160 json!({
3161 "file1": "a\nb\nc\n",
3162 "file2": "one\r\ntwo\r\nthree\r\n",
3163 }),
3164 )
3165 .await;
3166
3167 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3168 let buffer1 = project
3169 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3170 .await
3171 .unwrap();
3172 let buffer2 = project
3173 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3174 .await
3175 .unwrap();
3176
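    // The buffers expose their text with normalized newlines, while
    // remembering each file's original line ending style.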
3177 buffer1.update(cx, |buffer, _| {
3178 assert_eq!(buffer.text(), "a\nb\nc\n");
3179 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3180 });
3181 buffer2.update(cx, |buffer, _| {
3182 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3183 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3184 });
3185
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3188 fs.save(
3189 "/dir/file1".as_ref(),
3190 &"aaa\nb\nc\n".into(),
3191 LineEnding::Windows,
3192 )
3193 .await
3194 .unwrap();
3195 cx.executor().run_until_parked();
3196 buffer1.update(cx, |buffer, _| {
3197 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3198 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3199 });
3200
    // Save a file with Windows line endings. The file is written correctly.
3202 buffer2.update(cx, |buffer, cx| {
3203 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3204 });
3205 project
3206 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3207 .await
3208 .unwrap();
3209 assert_eq!(
3210 fs.load("/dir/file2".as_ref()).await.unwrap(),
3211 "one\r\ntwo\r\nthree\r\nfour\r\n",
3212 );
3213}
3214
3215#[gpui::test]
3216async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3217 init_test(cx);
3218
3219 let fs = FakeFs::new(cx.executor());
3220 fs.insert_tree(
3221 "/the-dir",
3222 json!({
3223 "a.rs": "
3224 fn foo(mut v: Vec<usize>) {
3225 for x in &v {
3226 v.push(1);
3227 }
3228 }
3229 "
3230 .unindent(),
3231 }),
3232 )
3233 .await;
3234
3235 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3236 let buffer = project
3237 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3238 .await
3239 .unwrap();
3240
3241 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
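    // Publish diagnostics whose related information ties each hint to its
    // primary diagnostic, so that they are grouped together.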
3242 let message = lsp::PublishDiagnosticsParams {
3243 uri: buffer_uri.clone(),
3244 diagnostics: vec![
3245 lsp::Diagnostic {
3246 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3247 severity: Some(DiagnosticSeverity::WARNING),
3248 message: "error 1".to_string(),
3249 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3250 location: lsp::Location {
3251 uri: buffer_uri.clone(),
3252 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3253 },
3254 message: "error 1 hint 1".to_string(),
3255 }]),
3256 ..Default::default()
3257 },
3258 lsp::Diagnostic {
3259 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3260 severity: Some(DiagnosticSeverity::HINT),
3261 message: "error 1 hint 1".to_string(),
3262 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3263 location: lsp::Location {
3264 uri: buffer_uri.clone(),
3265 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3266 },
3267 message: "original diagnostic".to_string(),
3268 }]),
3269 ..Default::default()
3270 },
3271 lsp::Diagnostic {
3272 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3273 severity: Some(DiagnosticSeverity::ERROR),
3274 message: "error 2".to_string(),
3275 related_information: Some(vec![
3276 lsp::DiagnosticRelatedInformation {
3277 location: lsp::Location {
3278 uri: buffer_uri.clone(),
3279 range: lsp::Range::new(
3280 lsp::Position::new(1, 13),
3281 lsp::Position::new(1, 15),
3282 ),
3283 },
3284 message: "error 2 hint 1".to_string(),
3285 },
3286 lsp::DiagnosticRelatedInformation {
3287 location: lsp::Location {
3288 uri: buffer_uri.clone(),
3289 range: lsp::Range::new(
3290 lsp::Position::new(1, 13),
3291 lsp::Position::new(1, 15),
3292 ),
3293 },
3294 message: "error 2 hint 2".to_string(),
3295 },
3296 ]),
3297 ..Default::default()
3298 },
3299 lsp::Diagnostic {
3300 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3301 severity: Some(DiagnosticSeverity::HINT),
3302 message: "error 2 hint 1".to_string(),
3303 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3304 location: lsp::Location {
3305 uri: buffer_uri.clone(),
3306 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3307 },
3308 message: "original diagnostic".to_string(),
3309 }]),
3310 ..Default::default()
3311 },
3312 lsp::Diagnostic {
3313 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3314 severity: Some(DiagnosticSeverity::HINT),
3315 message: "error 2 hint 2".to_string(),
3316 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3317 location: lsp::Location {
3318 uri: buffer_uri,
3319 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3320 },
3321 message: "original diagnostic".to_string(),
3322 }]),
3323 ..Default::default()
3324 },
3325 ],
3326 version: None,
3327 };
3328
3329 project
3330 .update(cx, |p, cx| {
3331 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3332 })
3333 .unwrap();
3334 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3335
3336 assert_eq!(
3337 buffer
3338 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3339 .collect::<Vec<_>>(),
3340 &[
3341 DiagnosticEntry {
3342 range: Point::new(1, 8)..Point::new(1, 9),
3343 diagnostic: Diagnostic {
3344 severity: DiagnosticSeverity::WARNING,
3345 message: "error 1".to_string(),
3346 group_id: 1,
3347 is_primary: true,
3348 ..Default::default()
3349 }
3350 },
3351 DiagnosticEntry {
3352 range: Point::new(1, 8)..Point::new(1, 9),
3353 diagnostic: Diagnostic {
3354 severity: DiagnosticSeverity::HINT,
3355 message: "error 1 hint 1".to_string(),
3356 group_id: 1,
3357 is_primary: false,
3358 ..Default::default()
3359 }
3360 },
3361 DiagnosticEntry {
3362 range: Point::new(1, 13)..Point::new(1, 15),
3363 diagnostic: Diagnostic {
3364 severity: DiagnosticSeverity::HINT,
3365 message: "error 2 hint 1".to_string(),
3366 group_id: 0,
3367 is_primary: false,
3368 ..Default::default()
3369 }
3370 },
3371 DiagnosticEntry {
3372 range: Point::new(1, 13)..Point::new(1, 15),
3373 diagnostic: Diagnostic {
3374 severity: DiagnosticSeverity::HINT,
3375 message: "error 2 hint 2".to_string(),
3376 group_id: 0,
3377 is_primary: false,
3378 ..Default::default()
3379 }
3380 },
3381 DiagnosticEntry {
3382 range: Point::new(2, 8)..Point::new(2, 17),
3383 diagnostic: Diagnostic {
3384 severity: DiagnosticSeverity::ERROR,
3385 message: "error 2".to_string(),
3386 group_id: 0,
3387 is_primary: true,
3388 ..Default::default()
3389 }
3390 }
3391 ]
3392 );
3393
3394 assert_eq!(
3395 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3396 &[
3397 DiagnosticEntry {
3398 range: Point::new(1, 13)..Point::new(1, 15),
3399 diagnostic: Diagnostic {
3400 severity: DiagnosticSeverity::HINT,
3401 message: "error 2 hint 1".to_string(),
3402 group_id: 0,
3403 is_primary: false,
3404 ..Default::default()
3405 }
3406 },
3407 DiagnosticEntry {
3408 range: Point::new(1, 13)..Point::new(1, 15),
3409 diagnostic: Diagnostic {
3410 severity: DiagnosticSeverity::HINT,
3411 message: "error 2 hint 2".to_string(),
3412 group_id: 0,
3413 is_primary: false,
3414 ..Default::default()
3415 }
3416 },
3417 DiagnosticEntry {
3418 range: Point::new(2, 8)..Point::new(2, 17),
3419 diagnostic: Diagnostic {
3420 severity: DiagnosticSeverity::ERROR,
3421 message: "error 2".to_string(),
3422 group_id: 0,
3423 is_primary: true,
3424 ..Default::default()
3425 }
3426 }
3427 ]
3428 );
3429
3430 assert_eq!(
3431 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3432 &[
3433 DiagnosticEntry {
3434 range: Point::new(1, 8)..Point::new(1, 9),
3435 diagnostic: Diagnostic {
3436 severity: DiagnosticSeverity::WARNING,
3437 message: "error 1".to_string(),
3438 group_id: 1,
3439 is_primary: true,
3440 ..Default::default()
3441 }
3442 },
3443 DiagnosticEntry {
3444 range: Point::new(1, 8)..Point::new(1, 9),
3445 diagnostic: Diagnostic {
3446 severity: DiagnosticSeverity::HINT,
3447 message: "error 1 hint 1".to_string(),
3448 group_id: 1,
3449 is_primary: false,
3450 ..Default::default()
3451 }
3452 },
3453 ]
3454 );
3455}
3456
3457#[gpui::test]
3458async fn test_rename(cx: &mut gpui::TestAppContext) {
3459 init_test(cx);
3460
3461 let mut language = Language::new(
3462 LanguageConfig {
3463 name: "Rust".into(),
3464 path_suffixes: vec!["rs".to_string()],
3465 ..Default::default()
3466 },
3467 Some(tree_sitter_rust::language()),
3468 );
3469 let mut fake_servers = language
3470 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3471 capabilities: lsp::ServerCapabilities {
3472 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3473 prepare_provider: Some(true),
3474 work_done_progress_options: Default::default(),
3475 })),
3476 ..Default::default()
3477 },
3478 ..Default::default()
3479 }))
3480 .await;
3481
3482 let fs = FakeFs::new(cx.executor());
3483 fs.insert_tree(
3484 "/dir",
3485 json!({
3486 "one.rs": "const ONE: usize = 1;",
3487 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3488 }),
3489 )
3490 .await;
3491
3492 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3493 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3494 let buffer = project
3495 .update(cx, |project, cx| {
3496 project.open_local_buffer("/dir/one.rs", cx)
3497 })
3498 .await
3499 .unwrap();
3500
3501 let fake_server = fake_servers.next().await.unwrap();
3502
3503 let response = project.update(cx, |project, cx| {
3504 project.prepare_rename(buffer.clone(), 7, cx)
3505 });
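    // The server reports the range of the symbol that can be renamed.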
3506 fake_server
3507 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3508 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3509 assert_eq!(params.position, lsp::Position::new(0, 7));
3510 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3511 lsp::Position::new(0, 6),
3512 lsp::Position::new(0, 9),
3513 ))))
3514 })
3515 .next()
3516 .await
3517 .unwrap();
3518 let range = response.await.unwrap().unwrap();
3519 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3520 assert_eq!(range, 6..9);
3521
3522 let response = project.update(cx, |project, cx| {
3523 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3524 });
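    // The server responds with a workspace edit that touches both `one.rs`
    // and `two.rs`.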
3525 fake_server
3526 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3527 assert_eq!(
3528 params.text_document_position.text_document.uri.as_str(),
3529 "file:///dir/one.rs"
3530 );
3531 assert_eq!(
3532 params.text_document_position.position,
3533 lsp::Position::new(0, 7)
3534 );
3535 assert_eq!(params.new_name, "THREE");
3536 Ok(Some(lsp::WorkspaceEdit {
3537 changes: Some(
3538 [
3539 (
3540 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3541 vec![lsp::TextEdit::new(
3542 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3543 "THREE".to_string(),
3544 )],
3545 ),
3546 (
3547 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3548 vec![
3549 lsp::TextEdit::new(
3550 lsp::Range::new(
3551 lsp::Position::new(0, 24),
3552 lsp::Position::new(0, 27),
3553 ),
3554 "THREE".to_string(),
3555 ),
3556 lsp::TextEdit::new(
3557 lsp::Range::new(
3558 lsp::Position::new(0, 35),
3559 lsp::Position::new(0, 38),
3560 ),
3561 "THREE".to_string(),
3562 ),
3563 ],
3564 ),
3565 ]
3566 .into_iter()
3567 .collect(),
3568 ),
3569 ..Default::default()
3570 }))
3571 })
3572 .next()
3573 .await
3574 .unwrap();
3575 let mut transaction = response.await.unwrap().0;
3576 assert_eq!(transaction.len(), 2);
3577 assert_eq!(
3578 transaction
3579 .remove_entry(&buffer)
3580 .unwrap()
3581 .0
3582 .update(cx, |buffer, _| buffer.text()),
3583 "const THREE: usize = 1;"
3584 );
3585 assert_eq!(
3586 transaction
3587 .into_keys()
3588 .next()
3589 .unwrap()
3590 .update(cx, |buffer, _| buffer.text()),
3591 "const TWO: usize = one::THREE + one::THREE;"
3592 );
3593}
3594
3595#[gpui::test]
3596async fn test_search(cx: &mut gpui::TestAppContext) {
3597 init_test(cx);
3598
3599 let fs = FakeFs::new(cx.executor());
3600 fs.insert_tree(
3601 "/dir",
3602 json!({
3603 "one.rs": "const ONE: usize = 1;",
3604 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3605 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3606 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3607 }),
3608 )
3609 .await;
3610 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3611 assert_eq!(
3612 search(
3613 &project,
3614 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3615 cx
3616 )
3617 .await
3618 .unwrap(),
3619 HashMap::from_iter([
3620 ("two.rs".to_string(), vec![6..9]),
3621 ("three.rs".to_string(), vec![37..40])
3622 ])
3623 );
3624
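    // Edit an open buffer without saving it. Subsequent searches should
    // reflect the buffer's unsaved contents rather than the file on disk.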
3625 let buffer_4 = project
3626 .update(cx, |project, cx| {
3627 project.open_local_buffer("/dir/four.rs", cx)
3628 })
3629 .await
3630 .unwrap();
3631 buffer_4.update(cx, |buffer, cx| {
3632 let text = "two::TWO";
3633 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3634 });
3635
3636 assert_eq!(
3637 search(
3638 &project,
3639 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3640 cx
3641 )
3642 .await
3643 .unwrap(),
3644 HashMap::from_iter([
3645 ("two.rs".to_string(), vec![6..9]),
3646 ("three.rs".to_string(), vec![37..40]),
3647 ("four.rs".to_string(), vec![25..28, 36..39])
3648 ])
3649 );
3650}
3651
3652#[gpui::test]
3653async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3654 init_test(cx);
3655
3656 let search_query = "file";
3657
3658 let fs = FakeFs::new(cx.executor());
3659 fs.insert_tree(
3660 "/dir",
3661 json!({
3662 "one.rs": r#"// Rust file one"#,
3663 "one.ts": r#"// TypeScript file one"#,
3664 "two.rs": r#"// Rust file two"#,
3665 "two.ts": r#"// TypeScript file two"#,
3666 }),
3667 )
3668 .await;
3669 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3670
3671 assert!(
3672 search(
3673 &project,
3674 SearchQuery::text(
3675 search_query,
3676 false,
3677 true,
3678 vec![PathMatcher::new("*.odd").unwrap()],
3679 Vec::new()
3680 )
3681 .unwrap(),
3682 cx
3683 )
3684 .await
3685 .unwrap()
3686 .is_empty(),
3687 "If no inclusions match, no files should be returned"
3688 );
3689
3690 assert_eq!(
3691 search(
3692 &project,
3693 SearchQuery::text(
3694 search_query,
3695 false,
3696 true,
3697 vec![PathMatcher::new("*.rs").unwrap()],
3698 Vec::new()
3699 )
3700 .unwrap(),
3701 cx
3702 )
3703 .await
3704 .unwrap(),
3705 HashMap::from_iter([
3706 ("one.rs".to_string(), vec![8..12]),
3707 ("two.rs".to_string(), vec![8..12]),
3708 ]),
        "A Rust-only search should give only Rust files"
3710 );
3711
3712 assert_eq!(
3713 search(
3714 &project,
3715 SearchQuery::text(
3716 search_query,
3717 false,
3718 true,
3719 vec![
3720 PathMatcher::new("*.ts").unwrap(),
3721 PathMatcher::new("*.odd").unwrap(),
3722 ],
3723 Vec::new()
3724 ).unwrap(),
3725 cx
3726 )
3727 .await
3728 .unwrap(),
3729 HashMap::from_iter([
3730 ("one.ts".to_string(), vec![14..18]),
3731 ("two.ts".to_string(), vec![14..18]),
3732 ]),
        "A TypeScript-only search should give only TypeScript files, even if other inclusions don't match anything"
3734 );
3735
3736 assert_eq!(
3737 search(
3738 &project,
3739 SearchQuery::text(
3740 search_query,
3741 false,
3742 true,
3743 vec![
3744 PathMatcher::new("*.rs").unwrap(),
3745 PathMatcher::new("*.ts").unwrap(),
3746 PathMatcher::new("*.odd").unwrap(),
3747 ],
3748 Vec::new()
3749 ).unwrap(),
3750 cx
3751 )
3752 .await
3753 .unwrap(),
3754 HashMap::from_iter([
3755 ("one.rs".to_string(), vec![8..12]),
3756 ("one.ts".to_string(), vec![14..18]),
3757 ("two.rs".to_string(), vec![8..12]),
3758 ("two.ts".to_string(), vec![14..18]),
3759 ]),
        "A Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3761 );
3762}
3763
3764#[gpui::test]
3765async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3766 init_test(cx);
3767
3768 let search_query = "file";
3769
3770 let fs = FakeFs::new(cx.executor());
3771 fs.insert_tree(
3772 "/dir",
3773 json!({
3774 "one.rs": r#"// Rust file one"#,
3775 "one.ts": r#"// TypeScript file one"#,
3776 "two.rs": r#"// Rust file two"#,
3777 "two.ts": r#"// TypeScript file two"#,
3778 }),
3779 )
3780 .await;
3781 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3782
3783 assert_eq!(
3784 search(
3785 &project,
3786 SearchQuery::text(
3787 search_query,
3788 false,
3789 true,
3790 Vec::new(),
3791 vec![PathMatcher::new("*.odd").unwrap()],
3792 )
3793 .unwrap(),
3794 cx
3795 )
3796 .await
3797 .unwrap(),
3798 HashMap::from_iter([
3799 ("one.rs".to_string(), vec![8..12]),
3800 ("one.ts".to_string(), vec![14..18]),
3801 ("two.rs".to_string(), vec![8..12]),
3802 ("two.ts".to_string(), vec![14..18]),
3803 ]),
3804 "If no exclusions match, all files should be returned"
3805 );
3806
3807 assert_eq!(
3808 search(
3809 &project,
3810 SearchQuery::text(
3811 search_query,
3812 false,
3813 true,
3814 Vec::new(),
3815 vec![PathMatcher::new("*.rs").unwrap()],
3816 )
3817 .unwrap(),
3818 cx
3819 )
3820 .await
3821 .unwrap(),
3822 HashMap::from_iter([
3823 ("one.ts".to_string(), vec![14..18]),
3824 ("two.ts".to_string(), vec![14..18]),
3825 ]),
3826 "Rust exclusion search should give only TypeScript files"
3827 );
3828
3829 assert_eq!(
3830 search(
3831 &project,
3832 SearchQuery::text(
3833 search_query,
3834 false,
3835 true,
3836 Vec::new(),
3837 vec![
3838 PathMatcher::new("*.ts").unwrap(),
3839 PathMatcher::new("*.odd").unwrap(),
3840 ],
3841 ).unwrap(),
3842 cx
3843 )
3844 .await
3845 .unwrap(),
3846 HashMap::from_iter([
3847 ("one.rs".to_string(), vec![8..12]),
3848 ("two.rs".to_string(), vec![8..12]),
3849 ]),
3850 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3851 );
3852
3853 assert!(
3854 search(
3855 &project,
3856 SearchQuery::text(
3857 search_query,
3858 false,
3859 true,
3860 Vec::new(),
3861 vec![
3862 PathMatcher::new("*.rs").unwrap(),
3863 PathMatcher::new("*.ts").unwrap(),
3864 PathMatcher::new("*.odd").unwrap(),
3865 ],
3866 ).unwrap(),
3867 cx
3868 )
3869 .await
3870 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3872 );
3873}
3874
3875#[gpui::test]
3876async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3877 init_test(cx);
3878
3879 let search_query = "file";
3880
3881 let fs = FakeFs::new(cx.executor());
3882 fs.insert_tree(
3883 "/dir",
3884 json!({
3885 "one.rs": r#"// Rust file one"#,
3886 "one.ts": r#"// TypeScript file one"#,
3887 "two.rs": r#"// Rust file two"#,
3888 "two.ts": r#"// TypeScript file two"#,
3889 }),
3890 )
3891 .await;
3892 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3893
3894 assert!(
3895 search(
3896 &project,
3897 SearchQuery::text(
3898 search_query,
3899 false,
3900 true,
3901 vec![PathMatcher::new("*.odd").unwrap()],
3902 vec![PathMatcher::new("*.odd").unwrap()],
3903 )
3904 .unwrap(),
3905 cx
3906 )
3907 .await
3908 .unwrap()
3909 .is_empty(),
        "When neither the inclusions nor the exclusions match any files, no files should be returned"
3911 );
3912
3913 assert!(
3914 search(
3915 &project,
3916 SearchQuery::text(
3917 search_query,
3918 false,
3919 true,
3920 vec![PathMatcher::new("*.ts").unwrap()],
3921 vec![PathMatcher::new("*.ts").unwrap()],
3922 ).unwrap(),
3923 cx
3924 )
3925 .await
3926 .unwrap()
3927 .is_empty(),
        "If both the inclusions and exclusions match TypeScript files, the exclusions should win and no files should be returned."
3929 );
3930
3931 assert!(
3932 search(
3933 &project,
3934 SearchQuery::text(
3935 search_query,
3936 false,
3937 true,
3938 vec![
3939 PathMatcher::new("*.ts").unwrap(),
3940 PathMatcher::new("*.odd").unwrap()
3941 ],
3942 vec![
3943 PathMatcher::new("*.ts").unwrap(),
3944 PathMatcher::new("*.odd").unwrap()
3945 ],
3946 )
3947 .unwrap(),
3948 cx
3949 )
3950 .await
3951 .unwrap()
3952 .is_empty(),
3953 "Non-matching inclusions and exclusions should not change that."
3954 );
3955
3956 assert_eq!(
3957 search(
3958 &project,
3959 SearchQuery::text(
3960 search_query,
3961 false,
3962 true,
3963 vec![
3964 PathMatcher::new("*.ts").unwrap(),
3965 PathMatcher::new("*.odd").unwrap()
3966 ],
3967 vec![
3968 PathMatcher::new("*.rs").unwrap(),
3969 PathMatcher::new("*.odd").unwrap()
3970 ],
3971 )
3972 .unwrap(),
3973 cx
3974 )
3975 .await
3976 .unwrap(),
3977 HashMap::from_iter([
3978 ("one.ts".to_string(), vec![14..18]),
3979 ("two.ts".to_string(), vec![14..18]),
3980 ]),
3981 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3982 );
3983}
3984
3985#[test]
3986fn test_glob_literal_prefix() {
3987 assert_eq!(glob_literal_prefix("**/*.js"), "");
3988 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
3989 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
3990 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
3991}
3992
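/// Runs `query` against `project` and returns the matching ranges, keyed by
/// the path of the buffer in which they were found.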
3993async fn search(
3994 project: &Model<Project>,
3995 query: SearchQuery,
3996 cx: &mut gpui::TestAppContext,
3997) -> Result<HashMap<String, Vec<Range<usize>>>> {
3998 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
3999 let mut result = HashMap::default();
4000 while let Some((buffer, range)) = search_rx.next().await {
4001 result.entry(buffer).or_insert(range);
4002 }
4003 Ok(result
4004 .into_iter()
4005 .map(|(buffer, ranges)| {
4006 buffer.update(cx, |buffer, _| {
4007 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4008 let ranges = ranges
4009 .into_iter()
4010 .map(|range| range.to_offset(buffer))
4011 .collect::<Vec<_>>();
4012 (path, ranges)
4013 })
4014 })
4015 .collect())
4016}
4017
4018fn init_test(cx: &mut gpui::TestAppContext) {
4019 if std::env::var("RUST_LOG").is_ok() {
4020 env_logger::init();
4021 }
4022
4023 cx.update(|cx| {
4024 let settings_store = SettingsStore::test(cx);
4025 cx.set_global(settings_store);
4026 language::init(cx);
4027 Project::init_settings(cx);
4028 });
4029}