1// use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
2// use fs::{FakeFs, RealFs};
3// use futures::{future, StreamExt};
4// use gpui::{executor::Deterministic, test::subscribe, AppContext};
5// use language2::{
6// language_settings::{AllLanguageSettings, LanguageSettingsContent},
7// tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8// LineEnding, OffsetRangeExt, Point, ToPoint,
9// };
10// use lsp2::Url;
11// use parking_lot::Mutex;
12// use pretty_assertions::assert_eq;
13// use serde_json::json;
14// use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
15// use unindent::Unindent as _;
16// use util::{assert_set_eq, test::temp_tree};
17
18// #[cfg(test)]
19// #[ctor::ctor]
20// fn init_logger() {
21// if std::env::var("RUST_LOG").is_ok() {
22// env_logger::init();
23// }
24// }
25
26// #[gpui::test]
27// async fn test_symlinks(cx: &mut gpui::TestAppContext) {
28// init_test(cx);
29// cx.foreground().allow_parking();
30
31// let dir = temp_tree(json!({
32// "root": {
33// "apple": "",
34// "banana": {
35// "carrot": {
36// "date": "",
37// "endive": "",
38// }
39// },
40// "fennel": {
41// "grape": "",
42// }
43// }
44// }));
45
46// let root_link_path = dir.path().join("root_link");
47// unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
48// unix::fs::symlink(
49// &dir.path().join("root/fennel"),
50// &dir.path().join("root/finnochio"),
51// )
52// .unwrap();
53
54// let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
55// project.read_with(cx, |project, cx| {
56// let tree = project.worktrees(cx).next().unwrap().read(cx);
57// assert_eq!(tree.file_count(), 5);
58// assert_eq!(
59// tree.inode_for_path("fennel/grape"),
60// tree.inode_for_path("finnochio/grape")
61// );
62// });
63// }
64
65// #[gpui::test]
66// async fn test_managing_project_specific_settings(
67// deterministic: Arc<Deterministic>,
68// cx: &mut gpui::TestAppContext,
69// ) {
70// init_test(cx);
71
72// let fs = FakeFs::new(cx.background());
73// fs.insert_tree(
74// "/the-root",
75// json!({
76// ".zed": {
77// "settings.json": r#"{ "tab_size": 8 }"#
78// },
79// "a": {
80// "a.rs": "fn a() {\n A\n}"
81// },
82// "b": {
83// ".zed": {
84// "settings.json": r#"{ "tab_size": 2 }"#
85// },
86// "b.rs": "fn b() {\n B\n}"
87// }
88// }),
89// )
90// .await;
91
92// let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
93// let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
94
95// deterministic.run_until_parked();
96// cx.read(|cx| {
97// let tree = worktree.read(cx);
98
99// let settings_a = language_settings(
100// None,
101// Some(
102// &(File::for_entry(
103// tree.entry_for_path("a/a.rs").unwrap().clone(),
104// worktree.clone(),
105// ) as _),
106// ),
107// cx,
108// );
109// let settings_b = language_settings(
110// None,
111// Some(
112// &(File::for_entry(
113// tree.entry_for_path("b/b.rs").unwrap().clone(),
114// worktree.clone(),
115// ) as _),
116// ),
117// cx,
118// );
119
120// assert_eq!(settings_a.tab_size.get(), 8);
121// assert_eq!(settings_b.tab_size.get(), 2);
122// });
123// }
124
125// #[gpui::test]
126// async fn test_managing_language_servers(
127// deterministic: Arc<Deterministic>,
128// cx: &mut gpui::TestAppContext,
129// ) {
130// init_test(cx);
131
132// let mut rust_language = Language::new(
133// LanguageConfig {
134// name: "Rust".into(),
135// path_suffixes: vec!["rs".to_string()],
136// ..Default::default()
137// },
138// Some(tree_sitter_rust::language()),
139// );
140// let mut json_language = Language::new(
141// LanguageConfig {
142// name: "JSON".into(),
143// path_suffixes: vec!["json".to_string()],
144// ..Default::default()
145// },
146// None,
147// );
148// let mut fake_rust_servers = rust_language
149// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
150// name: "the-rust-language-server",
151// capabilities: lsp2::ServerCapabilities {
152// completion_provider: Some(lsp2::CompletionOptions {
153// trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
154// ..Default::default()
155// }),
156// ..Default::default()
157// },
158// ..Default::default()
159// }))
160// .await;
161// let mut fake_json_servers = json_language
162// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
163// name: "the-json-language-server",
164// capabilities: lsp2::ServerCapabilities {
165// completion_provider: Some(lsp2::CompletionOptions {
166// trigger_characters: Some(vec![":".to_string()]),
167// ..Default::default()
168// }),
169// ..Default::default()
170// },
171// ..Default::default()
172// }))
173// .await;
174
175// let fs = FakeFs::new(cx.background());
176// fs.insert_tree(
177// "/the-root",
178// json!({
179// "test.rs": "const A: i32 = 1;",
180// "test2.rs": "",
181// "Cargo.toml": "a = 1",
182// "package.json": "{\"a\": 1}",
183// }),
184// )
185// .await;
186
187// let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
188
189// // Open a buffer without an associated language server.
190// let toml_buffer = project
191// .update(cx, |project, cx| {
192// project.open_local_buffer("/the-root/Cargo.toml", cx)
193// })
194// .await
195// .unwrap();
196
197// // Open a buffer with an associated language server before the language for it has been loaded.
198// let rust_buffer = project
199// .update(cx, |project, cx| {
200// project.open_local_buffer("/the-root/test.rs", cx)
201// })
202// .await
203// .unwrap();
204// rust_buffer.read_with(cx, |buffer, _| {
205// assert_eq!(buffer.language().map(|l| l.name()), None);
206// });
207
208// // Now we add the languages to the project, and ensure they get assigned to all
209// // the relevant open buffers.
210// project.update(cx, |project, _| {
211// project.languages.add(Arc::new(json_language));
212// project.languages.add(Arc::new(rust_language));
213// });
214// deterministic.run_until_parked();
215// rust_buffer.read_with(cx, |buffer, _| {
216// assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
217// });
218
219// // A server is started up, and it is notified about Rust files.
220// let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
221// assert_eq!(
222// fake_rust_server
223// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
224// .await
225// .text_document,
226// lsp2::TextDocumentItem {
227// uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
228// version: 0,
229// text: "const A: i32 = 1;".to_string(),
230// language_id: Default::default()
231// }
232// );
233
234// // The buffer is configured based on the language server's capabilities.
235// rust_buffer.read_with(cx, |buffer, _| {
236// assert_eq!(
237// buffer.completion_triggers(),
238// &[".".to_string(), "::".to_string()]
239// );
240// });
241// toml_buffer.read_with(cx, |buffer, _| {
242// assert!(buffer.completion_triggers().is_empty());
243// });
244
245// // Edit a buffer. The changes are reported to the language server.
246// rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
247// assert_eq!(
248// fake_rust_server
249// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
250// .await
251// .text_document,
252// lsp2::VersionedTextDocumentIdentifier::new(
253// lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
254// 1
255// )
256// );
257
258// // Open a third buffer with a different associated language server.
259// let json_buffer = project
260// .update(cx, |project, cx| {
261// project.open_local_buffer("/the-root/package.json", cx)
262// })
263// .await
264// .unwrap();
265
266// // A json language server is started up and is only notified about the json buffer.
267// let mut fake_json_server = fake_json_servers.next().await.unwrap();
268// assert_eq!(
269// fake_json_server
270// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
271// .await
272// .text_document,
273// lsp2::TextDocumentItem {
274// uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
275// version: 0,
276// text: "{\"a\": 1}".to_string(),
277// language_id: Default::default()
278// }
279// );
280
281// // This buffer is configured based on the second language server's
282// // capabilities.
283// json_buffer.read_with(cx, |buffer, _| {
284// assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
285// });
286
287// // When opening another buffer whose language server is already running,
288// // it is also configured based on the existing language server's capabilities.
289// let rust_buffer2 = project
290// .update(cx, |project, cx| {
291// project.open_local_buffer("/the-root/test2.rs", cx)
292// })
293// .await
294// .unwrap();
295// rust_buffer2.read_with(cx, |buffer, _| {
296// assert_eq!(
297// buffer.completion_triggers(),
298// &[".".to_string(), "::".to_string()]
299// );
300// });
301
302// // Changes are reported only to servers matching the buffer's language.
303// toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
304// rust_buffer2.update(cx, |buffer, cx| {
305// buffer.edit([(0..0, "let x = 1;")], None, cx)
306// });
307// assert_eq!(
308// fake_rust_server
309// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
310// .await
311// .text_document,
312// lsp2::VersionedTextDocumentIdentifier::new(
313// lsp2::Url::from_file_path("/the-root/test2.rs").unwrap(),
314// 1
315// )
316// );
317
318// // Save notifications are reported to all servers.
319// project
320// .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
321// .await
322// .unwrap();
323// assert_eq!(
324// fake_rust_server
325// .receive_notification::<lsp2::notification::DidSaveTextDocument>()
326// .await
327// .text_document,
328// lsp2::TextDocumentIdentifier::new(
329// lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
330// )
331// );
332// assert_eq!(
333// fake_json_server
334// .receive_notification::<lsp2::notification::DidSaveTextDocument>()
335// .await
336// .text_document,
337// lsp2::TextDocumentIdentifier::new(
338// lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
339// )
340// );
341
342// // Renames are reported only to servers matching the buffer's language.
343// fs.rename(
344// Path::new("/the-root/test2.rs"),
345// Path::new("/the-root/test3.rs"),
346// Default::default(),
347// )
348// .await
349// .unwrap();
350// assert_eq!(
351// fake_rust_server
352// .receive_notification::<lsp2::notification::DidCloseTextDocument>()
353// .await
354// .text_document,
355// lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test2.rs").unwrap()),
356// );
357// assert_eq!(
358// fake_rust_server
359// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
360// .await
361// .text_document,
362// lsp2::TextDocumentItem {
363// uri: lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),
364// version: 0,
365// text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
366// language_id: Default::default()
367// },
368// );
369
370// rust_buffer2.update(cx, |buffer, cx| {
371// buffer.update_diagnostics(
372// LanguageServerId(0),
373// DiagnosticSet::from_sorted_entries(
374// vec![DiagnosticEntry {
375// diagnostic: Default::default(),
376// range: Anchor::MIN..Anchor::MAX,
377// }],
378// &buffer.snapshot(),
379// ),
380// cx,
381// );
382// assert_eq!(
383// buffer
384// .snapshot()
385// .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
386// .count(),
387// 1
388// );
389// });
390
391// // When the rename changes the extension of the file, the buffer gets closed on the old
392// // language server and gets opened on the new one.
393// fs.rename(
394// Path::new("/the-root/test3.rs"),
395// Path::new("/the-root/test3.json"),
396// Default::default(),
397// )
398// .await
399// .unwrap();
400// assert_eq!(
401// fake_rust_server
402// .receive_notification::<lsp2::notification::DidCloseTextDocument>()
403// .await
404// .text_document,
405// lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),),
406// );
407// assert_eq!(
408// fake_json_server
409// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
410// .await
411// .text_document,
412// lsp2::TextDocumentItem {
413// uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
414// version: 0,
415// text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
416// language_id: Default::default()
417// },
418// );
419
420// // We clear the diagnostics, since the language has changed.
421// rust_buffer2.read_with(cx, |buffer, _| {
422// assert_eq!(
423// buffer
424// .snapshot()
425// .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
426// .count(),
427// 0
428// );
429// });
430
431// // The renamed file's version resets after changing language server.
432// rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
433// assert_eq!(
434// fake_json_server
435// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
436// .await
437// .text_document,
438// lsp2::VersionedTextDocumentIdentifier::new(
439// lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
440// 1
441// )
442// );
443
444// // Restart language servers
445// project.update(cx, |project, cx| {
446// project.restart_language_servers_for_buffers(
447// vec![rust_buffer.clone(), json_buffer.clone()],
448// cx,
449// );
450// });
451
452// let mut rust_shutdown_requests = fake_rust_server
453// .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
454// let mut json_shutdown_requests = fake_json_server
455// .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
456// futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
457
458// let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
459// let mut fake_json_server = fake_json_servers.next().await.unwrap();
460
461// // Ensure rust document is reopened in new rust language server
462// assert_eq!(
463// fake_rust_server
464// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
465// .await
466// .text_document,
467// lsp2::TextDocumentItem {
468// uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
469// version: 0,
470// text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
471// language_id: Default::default()
472// }
473// );
474
475// // Ensure json documents are reopened in new json language server
476// assert_set_eq!(
477// [
478// fake_json_server
479// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
480// .await
481// .text_document,
482// fake_json_server
483// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
484// .await
485// .text_document,
486// ],
487// [
488// lsp2::TextDocumentItem {
489// uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
490// version: 0,
491// text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
492// language_id: Default::default()
493// },
494// lsp2::TextDocumentItem {
495// uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
496// version: 0,
497// text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
498// language_id: Default::default()
499// }
500// ]
501// );
502
503// // Close notifications are reported only to servers matching the buffer's language.
504// cx.update(|_| drop(json_buffer));
505// let close_message = lsp2::DidCloseTextDocumentParams {
506// text_document: lsp2::TextDocumentIdentifier::new(
507// lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
508// ),
509// };
510// assert_eq!(
511// fake_json_server
512// .receive_notification::<lsp2::notification::DidCloseTextDocument>()
513// .await,
514// close_message,
515// );
516// }
517
518// #[gpui::test]
519// async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
520// init_test(cx);
521
522// let mut language = Language::new(
523// LanguageConfig {
524// name: "Rust".into(),
525// path_suffixes: vec!["rs".to_string()],
526// ..Default::default()
527// },
528// Some(tree_sitter_rust::language()),
529// );
530// let mut fake_servers = language
531// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
532// name: "the-language-server",
533// ..Default::default()
534// }))
535// .await;
536
537// let fs = FakeFs::new(cx.background());
538// fs.insert_tree(
539// "/the-root",
540// json!({
541// ".gitignore": "target\n",
542// "src": {
543// "a.rs": "",
544// "b.rs": "",
545// },
546// "target": {
547// "x": {
548// "out": {
549// "x.rs": ""
550// }
551// },
552// "y": {
553// "out": {
554// "y.rs": "",
555// }
556// },
557// "z": {
558// "out": {
559// "z.rs": ""
560// }
561// }
562// }
563// }),
564// )
565// .await;
566
567// let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
568// project.update(cx, |project, _| {
569// project.languages.add(Arc::new(language));
570// });
571// cx.foreground().run_until_parked();
572
573// // Start the language server by opening a buffer with a compatible file extension.
574// let _buffer = project
575// .update(cx, |project, cx| {
576// project.open_local_buffer("/the-root/src/a.rs", cx)
577// })
578// .await
579// .unwrap();
580
581// // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
582// project.read_with(cx, |project, cx| {
583// let worktree = project.worktrees(cx).next().unwrap();
584// assert_eq!(
585// worktree
586// .read(cx)
587// .snapshot()
588// .entries(true)
589// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
590// .collect::<Vec<_>>(),
591// &[
592// (Path::new(""), false),
593// (Path::new(".gitignore"), false),
594// (Path::new("src"), false),
595// (Path::new("src/a.rs"), false),
596// (Path::new("src/b.rs"), false),
597// (Path::new("target"), true),
598// ]
599// );
600// });
601
602// let prev_read_dir_count = fs.read_dir_call_count();
603
604// // Keep track of the FS events reported to the language server.
605// let fake_server = fake_servers.next().await.unwrap();
606// let file_changes = Arc::new(Mutex::new(Vec::new()));
607// fake_server
608// .request::<lsp2::request::RegisterCapability>(lsp2::RegistrationParams {
609// registrations: vec![lsp2::Registration {
610// id: Default::default(),
611// method: "workspace/didChangeWatchedFiles".to_string(),
612// register_options: serde_json::to_value(
613// lsp2::DidChangeWatchedFilesRegistrationOptions {
614// watchers: vec![
615// lsp2::FileSystemWatcher {
616// glob_pattern: lsp2::GlobPattern::String(
617// "/the-root/Cargo.toml".to_string(),
618// ),
619// kind: None,
620// },
621// lsp2::FileSystemWatcher {
622// glob_pattern: lsp2::GlobPattern::String(
623// "/the-root/src/*.{rs,c}".to_string(),
624// ),
625// kind: None,
626// },
627// lsp2::FileSystemWatcher {
628// glob_pattern: lsp2::GlobPattern::String(
629// "/the-root/target/y/**/*.rs".to_string(),
630// ),
631// kind: None,
632// },
633// ],
634// },
635// )
636// .ok(),
637// }],
638// })
639// .await
640// .unwrap();
641// fake_server.handle_notification::<lsp2::notification::DidChangeWatchedFiles, _>({
642// let file_changes = file_changes.clone();
643// move |params, _| {
644// let mut file_changes = file_changes.lock();
645// file_changes.extend(params.changes);
646// file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
647// }
648// });
649
650// cx.foreground().run_until_parked();
651// assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
652// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
653
654// // Now the language server has asked us to watch an ignored directory path,
655// // so we recursively load it.
656// project.read_with(cx, |project, cx| {
657// let worktree = project.worktrees(cx).next().unwrap();
658// assert_eq!(
659// worktree
660// .read(cx)
661// .snapshot()
662// .entries(true)
663// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
664// .collect::<Vec<_>>(),
665// &[
666// (Path::new(""), false),
667// (Path::new(".gitignore"), false),
668// (Path::new("src"), false),
669// (Path::new("src/a.rs"), false),
670// (Path::new("src/b.rs"), false),
671// (Path::new("target"), true),
672// (Path::new("target/x"), true),
673// (Path::new("target/y"), true),
674// (Path::new("target/y/out"), true),
675// (Path::new("target/y/out/y.rs"), true),
676// (Path::new("target/z"), true),
677// ]
678// );
679// });
680
681// // Perform some file system mutations, two of which match the watched patterns,
682// // and one of which does not.
683// fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
684// .await
685// .unwrap();
686// fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
687// .await
688// .unwrap();
689// fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
690// .await
691// .unwrap();
692// fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
693// .await
694// .unwrap();
695// fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
696// .await
697// .unwrap();
698
699// // The language server receives events for the FS mutations that match its watch patterns.
700// cx.foreground().run_until_parked();
701// assert_eq!(
702// &*file_changes.lock(),
703// &[
704// lsp2::FileEvent {
705// uri: lsp2::Url::from_file_path("/the-root/src/b.rs").unwrap(),
706// typ: lsp2::FileChangeType::DELETED,
707// },
708// lsp2::FileEvent {
709// uri: lsp2::Url::from_file_path("/the-root/src/c.rs").unwrap(),
710// typ: lsp2::FileChangeType::CREATED,
711// },
712// lsp2::FileEvent {
713// uri: lsp2::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
714// typ: lsp2::FileChangeType::CREATED,
715// },
716// ]
717// );
718// }
719
720// #[gpui::test]
721// async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
722// init_test(cx);
723
724// let fs = FakeFs::new(cx.background());
725// fs.insert_tree(
726// "/dir",
727// json!({
728// "a.rs": "let a = 1;",
729// "b.rs": "let b = 2;"
730// }),
731// )
732// .await;
733
734// let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
735
736// let buffer_a = project
737// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
738// .await
739// .unwrap();
740// let buffer_b = project
741// .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
742// .await
743// .unwrap();
744
745// project.update(cx, |project, cx| {
746// project
747// .update_diagnostics(
748// LanguageServerId(0),
749// lsp2::PublishDiagnosticsParams {
750// uri: Url::from_file_path("/dir/a.rs").unwrap(),
751// version: None,
752// diagnostics: vec![lsp2::Diagnostic {
753// range: lsp2::Range::new(
754// lsp2::Position::new(0, 4),
755// lsp2::Position::new(0, 5),
756// ),
757// severity: Some(lsp2::DiagnosticSeverity::ERROR),
758// message: "error 1".to_string(),
759// ..Default::default()
760// }],
761// },
762// &[],
763// cx,
764// )
765// .unwrap();
766// project
767// .update_diagnostics(
768// LanguageServerId(0),
769// lsp2::PublishDiagnosticsParams {
770// uri: Url::from_file_path("/dir/b.rs").unwrap(),
771// version: None,
772// diagnostics: vec![lsp2::Diagnostic {
773// range: lsp2::Range::new(
774// lsp2::Position::new(0, 4),
775// lsp2::Position::new(0, 5),
776// ),
777// severity: Some(lsp2::DiagnosticSeverity::WARNING),
778// message: "error 2".to_string(),
779// ..Default::default()
780// }],
781// },
782// &[],
783// cx,
784// )
785// .unwrap();
786// });
787
788// buffer_a.read_with(cx, |buffer, _| {
789// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
790// assert_eq!(
791// chunks
792// .iter()
793// .map(|(s, d)| (s.as_str(), *d))
794// .collect::<Vec<_>>(),
795// &[
796// ("let ", None),
797// ("a", Some(DiagnosticSeverity::ERROR)),
798// (" = 1;", None),
799// ]
800// );
801// });
802// buffer_b.read_with(cx, |buffer, _| {
803// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
804// assert_eq!(
805// chunks
806// .iter()
807// .map(|(s, d)| (s.as_str(), *d))
808// .collect::<Vec<_>>(),
809// &[
810// ("let ", None),
811// ("b", Some(DiagnosticSeverity::WARNING)),
812// (" = 2;", None),
813// ]
814// );
815// });
816// }
817
818// #[gpui::test]
819// async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
820// init_test(cx);
821
822// let fs = FakeFs::new(cx.background());
823// fs.insert_tree(
824// "/root",
825// json!({
826// "dir": {
827// "a.rs": "let a = 1;",
828// },
829// "other.rs": "let b = c;"
830// }),
831// )
832// .await;
833
834// let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
835
836// let (worktree, _) = project
837// .update(cx, |project, cx| {
838// project.find_or_create_local_worktree("/root/other.rs", false, cx)
839// })
840// .await
841// .unwrap();
842// let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
843
844// project.update(cx, |project, cx| {
845// project
846// .update_diagnostics(
847// LanguageServerId(0),
848// lsp2::PublishDiagnosticsParams {
849// uri: Url::from_file_path("/root/other.rs").unwrap(),
850// version: None,
851// diagnostics: vec![lsp2::Diagnostic {
852// range: lsp2::Range::new(
853// lsp2::Position::new(0, 8),
854// lsp2::Position::new(0, 9),
855// ),
856// severity: Some(lsp2::DiagnosticSeverity::ERROR),
857// message: "unknown variable 'c'".to_string(),
858// ..Default::default()
859// }],
860// },
861// &[],
862// cx,
863// )
864// .unwrap();
865// });
866
867// let buffer = project
868// .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
869// .await
870// .unwrap();
871// buffer.read_with(cx, |buffer, _| {
872// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
873// assert_eq!(
874// chunks
875// .iter()
876// .map(|(s, d)| (s.as_str(), *d))
877// .collect::<Vec<_>>(),
878// &[
879// ("let b = ", None),
880// ("c", Some(DiagnosticSeverity::ERROR)),
881// (";", None),
882// ]
883// );
884// });
885
886// project.read_with(cx, |project, cx| {
887// assert_eq!(project.diagnostic_summaries(cx).next(), None);
888// assert_eq!(project.diagnostic_summary(cx).error_count, 0);
889// });
890// }
891
892// #[gpui::test]
893// async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
894// init_test(cx);
895
896// let progress_token = "the-progress-token";
897// let mut language = Language::new(
898// LanguageConfig {
899// name: "Rust".into(),
900// path_suffixes: vec!["rs".to_string()],
901// ..Default::default()
902// },
903// Some(tree_sitter_rust::language()),
904// );
905// let mut fake_servers = language
906// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
907// disk_based_diagnostics_progress_token: Some(progress_token.into()),
908// disk_based_diagnostics_sources: vec!["disk".into()],
909// ..Default::default()
910// }))
911// .await;
912
913// let fs = FakeFs::new(cx.background());
914// fs.insert_tree(
915// "/dir",
916// json!({
917// "a.rs": "fn a() { A }",
918// "b.rs": "const y: i32 = 1",
919// }),
920// )
921// .await;
922
923// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
924// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
925// let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
926
927// // Cause worktree to start the fake language server
928// let _buffer = project
929// .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
930// .await
931// .unwrap();
932
933// let mut events = subscribe(&project, cx);
934
935// let fake_server = fake_servers.next().await.unwrap();
936// assert_eq!(
937// events.next().await.unwrap(),
938// Event::LanguageServerAdded(LanguageServerId(0)),
939// );
940
941// fake_server
942// .start_progress(format!("{}/0", progress_token))
943// .await;
944// assert_eq!(
945// events.next().await.unwrap(),
946// Event::DiskBasedDiagnosticsStarted {
947// language_server_id: LanguageServerId(0),
948// }
949// );
950
951// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
952// uri: Url::from_file_path("/dir/a.rs").unwrap(),
953// version: None,
954// diagnostics: vec![lsp2::Diagnostic {
955// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
956// severity: Some(lsp2::DiagnosticSeverity::ERROR),
957// message: "undefined variable 'A'".to_string(),
958// ..Default::default()
959// }],
960// });
961// assert_eq!(
962// events.next().await.unwrap(),
963// Event::DiagnosticsUpdated {
964// language_server_id: LanguageServerId(0),
965// path: (worktree_id, Path::new("a.rs")).into()
966// }
967// );
968
969// fake_server.end_progress(format!("{}/0", progress_token));
970// assert_eq!(
971// events.next().await.unwrap(),
972// Event::DiskBasedDiagnosticsFinished {
973// language_server_id: LanguageServerId(0)
974// }
975// );
976
977// let buffer = project
978// .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
979// .await
980// .unwrap();
981
982// buffer.read_with(cx, |buffer, _| {
983// let snapshot = buffer.snapshot();
984// let diagnostics = snapshot
985// .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
986// .collect::<Vec<_>>();
987// assert_eq!(
988// diagnostics,
989// &[DiagnosticEntry {
990// range: Point::new(0, 9)..Point::new(0, 10),
991// diagnostic: Diagnostic {
992// severity: lsp2::DiagnosticSeverity::ERROR,
993// message: "undefined variable 'A'".to_string(),
994// group_id: 0,
995// is_primary: true,
996// ..Default::default()
997// }
998// }]
999// )
1000// });
1001
1002// // Ensure publishing empty diagnostics twice only results in one update event.
1003// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1004// uri: Url::from_file_path("/dir/a.rs").unwrap(),
1005// version: None,
1006// diagnostics: Default::default(),
1007// });
1008// assert_eq!(
1009// events.next().await.unwrap(),
1010// Event::DiagnosticsUpdated {
1011// language_server_id: LanguageServerId(0),
1012// path: (worktree_id, Path::new("a.rs")).into()
1013// }
1014// );
1015
1016// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1017// uri: Url::from_file_path("/dir/a.rs").unwrap(),
1018// version: None,
1019// diagnostics: Default::default(),
1020// });
1021// cx.foreground().run_until_parked();
1022// assert_eq!(futures::poll!(events.next()), Poll::Pending);
1023// }
1024
1025// #[gpui::test]
1026// async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1027// init_test(cx);
1028
1029// let progress_token = "the-progress-token";
1030// let mut language = Language::new(
1031// LanguageConfig {
1032// path_suffixes: vec!["rs".to_string()],
1033// ..Default::default()
1034// },
1035// None,
1036// );
1037// let mut fake_servers = language
1038// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1039// disk_based_diagnostics_sources: vec!["disk".into()],
1040// disk_based_diagnostics_progress_token: Some(progress_token.into()),
1041// ..Default::default()
1042// }))
1043// .await;
1044
1045// let fs = FakeFs::new(cx.background());
1046// fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1047
1048// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1049// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1050
1051// let buffer = project
1052// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1053// .await
1054// .unwrap();
1055
1056// // Simulate diagnostics starting to update.
1057// let fake_server = fake_servers.next().await.unwrap();
1058// fake_server.start_progress(progress_token).await;
1059
1060// // Restart the server before the diagnostics finish updating.
1061// project.update(cx, |project, cx| {
1062// project.restart_language_servers_for_buffers([buffer], cx);
1063// });
1064// let mut events = subscribe(&project, cx);
1065
1066// // Simulate the newly started server sending more diagnostics.
1067// let fake_server = fake_servers.next().await.unwrap();
1068// assert_eq!(
1069// events.next().await.unwrap(),
1070// Event::LanguageServerAdded(LanguageServerId(1))
1071// );
1072// fake_server.start_progress(progress_token).await;
1073// assert_eq!(
1074// events.next().await.unwrap(),
1075// Event::DiskBasedDiagnosticsStarted {
1076// language_server_id: LanguageServerId(1)
1077// }
1078// );
1079// project.read_with(cx, |project, _| {
1080// assert_eq!(
1081// project
1082// .language_servers_running_disk_based_diagnostics()
1083// .collect::<Vec<_>>(),
1084// [LanguageServerId(1)]
1085// );
1086// });
1087
1088// // All diagnostics are considered done, despite the old server's diagnostic
1089// // task never completing.
1090// fake_server.end_progress(progress_token);
1091// assert_eq!(
1092// events.next().await.unwrap(),
1093// Event::DiskBasedDiagnosticsFinished {
1094// language_server_id: LanguageServerId(1)
1095// }
1096// );
1097// project.read_with(cx, |project, _| {
1098// assert_eq!(
1099// project
1100// .language_servers_running_disk_based_diagnostics()
1101// .collect::<Vec<_>>(),
1102// [LanguageServerId(0); 0]
1103// );
1104// });
1105// }
1106
1107// #[gpui::test]
1108// async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1109// init_test(cx);
1110
1111// let mut language = Language::new(
1112// LanguageConfig {
1113// path_suffixes: vec!["rs".to_string()],
1114// ..Default::default()
1115// },
1116// None,
1117// );
1118// let mut fake_servers = language
1119// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1120// ..Default::default()
1121// }))
1122// .await;
1123
1124// let fs = FakeFs::new(cx.background());
1125// fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1126
1127// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1128// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1129
1130// let buffer = project
1131// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1132// .await
1133// .unwrap();
1134
1135// // Publish diagnostics
1136// let fake_server = fake_servers.next().await.unwrap();
1137// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1138// uri: Url::from_file_path("/dir/a.rs").unwrap(),
1139// version: None,
1140// diagnostics: vec![lsp2::Diagnostic {
1141// range: lsp2::Range::new(lsp2::Position::new(0, 0), lsp2::Position::new(0, 0)),
1142// severity: Some(lsp2::DiagnosticSeverity::ERROR),
1143// message: "the message".to_string(),
1144// ..Default::default()
1145// }],
1146// });
1147
1148// cx.foreground().run_until_parked();
1149// buffer.read_with(cx, |buffer, _| {
1150// assert_eq!(
1151// buffer
1152// .snapshot()
1153// .diagnostics_in_range::<_, usize>(0..1, false)
1154// .map(|entry| entry.diagnostic.message.clone())
1155// .collect::<Vec<_>>(),
1156// ["the message".to_string()]
1157// );
1158// });
1159// project.read_with(cx, |project, cx| {
1160// assert_eq!(
1161// project.diagnostic_summary(cx),
1162// DiagnosticSummary {
1163// error_count: 1,
1164// warning_count: 0,
1165// }
1166// );
1167// });
1168
1169// project.update(cx, |project, cx| {
1170// project.restart_language_servers_for_buffers([buffer.clone()], cx);
1171// });
1172
1173// // The diagnostics are cleared.
1174// cx.foreground().run_until_parked();
1175// buffer.read_with(cx, |buffer, _| {
1176// assert_eq!(
1177// buffer
1178// .snapshot()
1179// .diagnostics_in_range::<_, usize>(0..1, false)
1180// .map(|entry| entry.diagnostic.message.clone())
1181// .collect::<Vec<_>>(),
1182// Vec::<String>::new(),
1183// );
1184// });
1185// project.read_with(cx, |project, cx| {
1186// assert_eq!(
1187// project.diagnostic_summary(cx),
1188// DiagnosticSummary {
1189// error_count: 0,
1190// warning_count: 0,
1191// }
1192// );
1193// });
1194// }
1195
1196// #[gpui::test]
1197// async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1198// init_test(cx);
1199
1200// let mut language = Language::new(
1201// LanguageConfig {
1202// path_suffixes: vec!["rs".to_string()],
1203// ..Default::default()
1204// },
1205// None,
1206// );
1207// let mut fake_servers = language
1208// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1209// name: "the-lsp",
1210// ..Default::default()
1211// }))
1212// .await;
1213
1214// let fs = FakeFs::new(cx.background());
1215// fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1216
1217// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1218// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1219
1220// let buffer = project
1221// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1222// .await
1223// .unwrap();
1224
1225// // Before restarting the server, report diagnostics with an unknown buffer version.
1226// let fake_server = fake_servers.next().await.unwrap();
1227// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1228// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1229// version: Some(10000),
1230// diagnostics: Vec::new(),
1231// });
1232// cx.foreground().run_until_parked();
1233
1234// project.update(cx, |project, cx| {
1235// project.restart_language_servers_for_buffers([buffer.clone()], cx);
1236// });
1237// let mut fake_server = fake_servers.next().await.unwrap();
1238// let notification = fake_server
1239// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1240// .await
1241// .text_document;
1242// assert_eq!(notification.version, 0);
1243// }
1244
1245// #[gpui::test]
1246// async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1247// init_test(cx);
1248
1249// let mut rust = Language::new(
1250// LanguageConfig {
1251// name: Arc::from("Rust"),
1252// path_suffixes: vec!["rs".to_string()],
1253// ..Default::default()
1254// },
1255// None,
1256// );
1257// let mut fake_rust_servers = rust
1258// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1259// name: "rust-lsp",
1260// ..Default::default()
1261// }))
1262// .await;
1263// let mut js = Language::new(
1264// LanguageConfig {
1265// name: Arc::from("JavaScript"),
1266// path_suffixes: vec!["js".to_string()],
1267// ..Default::default()
1268// },
1269// None,
1270// );
1271// let mut fake_js_servers = js
1272// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1273// name: "js-lsp",
1274// ..Default::default()
1275// }))
1276// .await;
1277
1278// let fs = FakeFs::new(cx.background());
1279// fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1280// .await;
1281
1282// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1283// project.update(cx, |project, _| {
1284// project.languages.add(Arc::new(rust));
1285// project.languages.add(Arc::new(js));
1286// });
1287
1288// let _rs_buffer = project
1289// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1290// .await
1291// .unwrap();
1292// let _js_buffer = project
1293// .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1294// .await
1295// .unwrap();
1296
1297// let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1298// assert_eq!(
1299// fake_rust_server_1
1300// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1301// .await
1302// .text_document
1303// .uri
1304// .as_str(),
1305// "file:///dir/a.rs"
1306// );
1307
1308// let mut fake_js_server = fake_js_servers.next().await.unwrap();
1309// assert_eq!(
1310// fake_js_server
1311// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1312// .await
1313// .text_document
1314// .uri
1315// .as_str(),
1316// "file:///dir/b.js"
1317// );
1318
1319// // Disable Rust language server, ensuring only that server gets stopped.
1320// cx.update(|cx| {
1321// cx.update_global(|settings: &mut SettingsStore, cx| {
1322// settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1323// settings.languages.insert(
1324// Arc::from("Rust"),
1325// LanguageSettingsContent {
1326// enable_language_server: Some(false),
1327// ..Default::default()
1328// },
1329// );
1330// });
1331// })
1332// });
1333// fake_rust_server_1
1334// .receive_notification::<lsp2::notification::Exit>()
1335// .await;
1336
1337// // Enable Rust and disable JavaScript language servers, ensuring that the
1338// // former gets started again and that the latter stops.
1339// cx.update(|cx| {
1340// cx.update_global(|settings: &mut SettingsStore, cx| {
1341// settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1342// settings.languages.insert(
1343// Arc::from("Rust"),
1344// LanguageSettingsContent {
1345// enable_language_server: Some(true),
1346// ..Default::default()
1347// },
1348// );
1349// settings.languages.insert(
1350// Arc::from("JavaScript"),
1351// LanguageSettingsContent {
1352// enable_language_server: Some(false),
1353// ..Default::default()
1354// },
1355// );
1356// });
1357// })
1358// });
1359// let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1360// assert_eq!(
1361// fake_rust_server_2
1362// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1363// .await
1364// .text_document
1365// .uri
1366// .as_str(),
1367// "file:///dir/a.rs"
1368// );
1369// fake_js_server
1370// .receive_notification::<lsp2::notification::Exit>()
1371// .await;
1372// }
1373
1374// #[gpui::test(iterations = 3)]
1375// async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1376// init_test(cx);
1377
1378// let mut language = Language::new(
1379// LanguageConfig {
1380// name: "Rust".into(),
1381// path_suffixes: vec!["rs".to_string()],
1382// ..Default::default()
1383// },
1384// Some(tree_sitter_rust::language()),
1385// );
1386// let mut fake_servers = language
1387// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1388// disk_based_diagnostics_sources: vec!["disk".into()],
1389// ..Default::default()
1390// }))
1391// .await;
1392
1393// let text = "
1394// fn a() { A }
1395// fn b() { BB }
1396// fn c() { CCC }
1397// "
1398// .unindent();
1399
1400// let fs = FakeFs::new(cx.background());
1401// fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1402
1403// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1404// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1405
1406// let buffer = project
1407// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1408// .await
1409// .unwrap();
1410
1411// let mut fake_server = fake_servers.next().await.unwrap();
1412// let open_notification = fake_server
1413// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1414// .await;
1415
1416// // Edit the buffer, moving the content down
1417// buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1418// let change_notification_1 = fake_server
1419// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
1420// .await;
1421// assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1422
1423// // Report some diagnostics for the initial version of the buffer
1424// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1425// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1426// version: Some(open_notification.text_document.version),
1427// diagnostics: vec![
1428// lsp2::Diagnostic {
1429// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
1430// severity: Some(DiagnosticSeverity::ERROR),
1431// message: "undefined variable 'A'".to_string(),
1432// source: Some("disk".to_string()),
1433// ..Default::default()
1434// },
1435// lsp2::Diagnostic {
1436// range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
1437// severity: Some(DiagnosticSeverity::ERROR),
1438// message: "undefined variable 'BB'".to_string(),
1439// source: Some("disk".to_string()),
1440// ..Default::default()
1441// },
1442// lsp2::Diagnostic {
1443// range: lsp2::Range::new(lsp2::Position::new(2, 9), lsp2::Position::new(2, 12)),
1444// severity: Some(DiagnosticSeverity::ERROR),
1445// source: Some("disk".to_string()),
1446// message: "undefined variable 'CCC'".to_string(),
1447// ..Default::default()
1448// },
1449// ],
1450// });
1451
1452// // The diagnostics have moved down since they were created.
1453// buffer.next_notification(cx).await;
1454// cx.foreground().run_until_parked();
1455// buffer.read_with(cx, |buffer, _| {
1456// assert_eq!(
1457// buffer
1458// .snapshot()
1459// .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1460// .collect::<Vec<_>>(),
1461// &[
1462// DiagnosticEntry {
1463// range: Point::new(3, 9)..Point::new(3, 11),
1464// diagnostic: Diagnostic {
1465// source: Some("disk".into()),
1466// severity: DiagnosticSeverity::ERROR,
1467// message: "undefined variable 'BB'".to_string(),
1468// is_disk_based: true,
1469// group_id: 1,
1470// is_primary: true,
1471// ..Default::default()
1472// },
1473// },
1474// DiagnosticEntry {
1475// range: Point::new(4, 9)..Point::new(4, 12),
1476// diagnostic: Diagnostic {
1477// source: Some("disk".into()),
1478// severity: DiagnosticSeverity::ERROR,
1479// message: "undefined variable 'CCC'".to_string(),
1480// is_disk_based: true,
1481// group_id: 2,
1482// is_primary: true,
1483// ..Default::default()
1484// }
1485// }
1486// ]
1487// );
1488// assert_eq!(
1489// chunks_with_diagnostics(buffer, 0..buffer.len()),
1490// [
1491// ("\n\nfn a() { ".to_string(), None),
1492// ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1493// (" }\nfn b() { ".to_string(), None),
1494// ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1495// (" }\nfn c() { ".to_string(), None),
1496// ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1497// (" }\n".to_string(), None),
1498// ]
1499// );
1500// assert_eq!(
1501// chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1502// [
1503// ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1504// (" }\nfn c() { ".to_string(), None),
1505// ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1506// ]
1507// );
1508// });
1509
1510// // Ensure overlapping diagnostics are highlighted correctly.
1511// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1512// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1513// version: Some(open_notification.text_document.version),
1514// diagnostics: vec![
1515// lsp2::Diagnostic {
1516// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
1517// severity: Some(DiagnosticSeverity::ERROR),
1518// message: "undefined variable 'A'".to_string(),
1519// source: Some("disk".to_string()),
1520// ..Default::default()
1521// },
1522// lsp2::Diagnostic {
1523// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 12)),
1524// severity: Some(DiagnosticSeverity::WARNING),
1525// message: "unreachable statement".to_string(),
1526// source: Some("disk".to_string()),
1527// ..Default::default()
1528// },
1529// ],
1530// });
1531
1532// buffer.next_notification(cx).await;
1533// cx.foreground().run_until_parked();
1534// buffer.read_with(cx, |buffer, _| {
1535// assert_eq!(
1536// buffer
1537// .snapshot()
1538// .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1539// .collect::<Vec<_>>(),
1540// &[
1541// DiagnosticEntry {
1542// range: Point::new(2, 9)..Point::new(2, 12),
1543// diagnostic: Diagnostic {
1544// source: Some("disk".into()),
1545// severity: DiagnosticSeverity::WARNING,
1546// message: "unreachable statement".to_string(),
1547// is_disk_based: true,
1548// group_id: 4,
1549// is_primary: true,
1550// ..Default::default()
1551// }
1552// },
1553// DiagnosticEntry {
1554// range: Point::new(2, 9)..Point::new(2, 10),
1555// diagnostic: Diagnostic {
1556// source: Some("disk".into()),
1557// severity: DiagnosticSeverity::ERROR,
1558// message: "undefined variable 'A'".to_string(),
1559// is_disk_based: true,
1560// group_id: 3,
1561// is_primary: true,
1562// ..Default::default()
1563// },
1564// }
1565// ]
1566// );
1567// assert_eq!(
1568// chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1569// [
1570// ("fn a() { ".to_string(), None),
1571// ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1572// (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1573// ("\n".to_string(), None),
1574// ]
1575// );
1576// assert_eq!(
1577// chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1578// [
1579// (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1580// ("\n".to_string(), None),
1581// ]
1582// );
1583// });
1584
1585// // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1586// // changes since the last save.
1587// buffer.update(cx, |buffer, cx| {
1588// buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1589// buffer.edit(
1590// [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1591// None,
1592// cx,
1593// );
1594// buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1595// });
1596// let change_notification_2 = fake_server
1597// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
1598// .await;
1599// assert!(
1600// change_notification_2.text_document.version > change_notification_1.text_document.version
1601// );
1602
1603// // Handle out-of-order diagnostics
1604// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1605// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1606// version: Some(change_notification_2.text_document.version),
1607// diagnostics: vec![
1608// lsp2::Diagnostic {
1609// range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
1610// severity: Some(DiagnosticSeverity::ERROR),
1611// message: "undefined variable 'BB'".to_string(),
1612// source: Some("disk".to_string()),
1613// ..Default::default()
1614// },
1615// lsp2::Diagnostic {
1616// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
1617// severity: Some(DiagnosticSeverity::WARNING),
1618// message: "undefined variable 'A'".to_string(),
1619// source: Some("disk".to_string()),
1620// ..Default::default()
1621// },
1622// ],
1623// });
1624
1625// buffer.next_notification(cx).await;
1626// cx.foreground().run_until_parked();
1627// buffer.read_with(cx, |buffer, _| {
1628// assert_eq!(
1629// buffer
1630// .snapshot()
1631// .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1632// .collect::<Vec<_>>(),
1633// &[
1634// DiagnosticEntry {
1635// range: Point::new(2, 21)..Point::new(2, 22),
1636// diagnostic: Diagnostic {
1637// source: Some("disk".into()),
1638// severity: DiagnosticSeverity::WARNING,
1639// message: "undefined variable 'A'".to_string(),
1640// is_disk_based: true,
1641// group_id: 6,
1642// is_primary: true,
1643// ..Default::default()
1644// }
1645// },
1646// DiagnosticEntry {
1647// range: Point::new(3, 9)..Point::new(3, 14),
1648// diagnostic: Diagnostic {
1649// source: Some("disk".into()),
1650// severity: DiagnosticSeverity::ERROR,
1651// message: "undefined variable 'BB'".to_string(),
1652// is_disk_based: true,
1653// group_id: 5,
1654// is_primary: true,
1655// ..Default::default()
1656// },
1657// }
1658// ]
1659// );
1660// });
1661// }
1662
1663// #[gpui::test]
1664// async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1665// init_test(cx);
1666
1667// let text = concat!(
1668// "let one = ;\n", //
1669// "let two = \n",
1670// "let three = 3;\n",
1671// );
1672
1673// let fs = FakeFs::new(cx.background());
1674// fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1675
1676// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1677// let buffer = project
1678// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1679// .await
1680// .unwrap();
1681
1682// project.update(cx, |project, cx| {
1683// project
1684// .update_buffer_diagnostics(
1685// &buffer,
1686// LanguageServerId(0),
1687// None,
1688// vec![
1689// DiagnosticEntry {
1690// range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1691// diagnostic: Diagnostic {
1692// severity: DiagnosticSeverity::ERROR,
1693// message: "syntax error 1".to_string(),
1694// ..Default::default()
1695// },
1696// },
1697// DiagnosticEntry {
1698// range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1699// diagnostic: Diagnostic {
1700// severity: DiagnosticSeverity::ERROR,
1701// message: "syntax error 2".to_string(),
1702// ..Default::default()
1703// },
1704// },
1705// ],
1706// cx,
1707// )
1708// .unwrap();
1709// });
1710
1711// // An empty range is extended forward to include the following character.
1712// // At the end of a line, an empty range is extended backward to include
1713// // the preceding character.
1714// buffer.read_with(cx, |buffer, _| {
1715// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1716// assert_eq!(
1717// chunks
1718// .iter()
1719// .map(|(s, d)| (s.as_str(), *d))
1720// .collect::<Vec<_>>(),
1721// &[
1722// ("let one = ", None),
1723// (";", Some(DiagnosticSeverity::ERROR)),
1724// ("\nlet two =", None),
1725// (" ", Some(DiagnosticSeverity::ERROR)),
1726// ("\nlet three = 3;\n", None)
1727// ]
1728// );
1729// });
1730// }
1731
1732// #[gpui::test]
1733// async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1734// init_test(cx);
1735
1736// let fs = FakeFs::new(cx.background());
1737// fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1738// .await;
1739
1740// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1741
1742// project.update(cx, |project, cx| {
1743// project
1744// .update_diagnostic_entries(
1745// LanguageServerId(0),
1746// Path::new("/dir/a.rs").to_owned(),
1747// None,
1748// vec![DiagnosticEntry {
1749// range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1750// diagnostic: Diagnostic {
1751// severity: DiagnosticSeverity::ERROR,
1752// is_primary: true,
1753// message: "syntax error a1".to_string(),
1754// ..Default::default()
1755// },
1756// }],
1757// cx,
1758// )
1759// .unwrap();
1760// project
1761// .update_diagnostic_entries(
1762// LanguageServerId(1),
1763// Path::new("/dir/a.rs").to_owned(),
1764// None,
1765// vec![DiagnosticEntry {
1766// range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1767// diagnostic: Diagnostic {
1768// severity: DiagnosticSeverity::ERROR,
1769// is_primary: true,
1770// message: "syntax error b1".to_string(),
1771// ..Default::default()
1772// },
1773// }],
1774// cx,
1775// )
1776// .unwrap();
1777
1778// assert_eq!(
1779// project.diagnostic_summary(cx),
1780// DiagnosticSummary {
1781// error_count: 2,
1782// warning_count: 0,
1783// }
1784// );
1785// });
1786// }
1787
1788// #[gpui::test]
1789// async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1790// init_test(cx);
1791
1792// let mut language = Language::new(
1793// LanguageConfig {
1794// name: "Rust".into(),
1795// path_suffixes: vec!["rs".to_string()],
1796// ..Default::default()
1797// },
1798// Some(tree_sitter_rust::language()),
1799// );
1800// let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1801
1802// let text = "
1803// fn a() {
1804// f1();
1805// }
1806// fn b() {
1807// f2();
1808// }
1809// fn c() {
1810// f3();
1811// }
1812// "
1813// .unindent();
1814
1815// let fs = FakeFs::new(cx.background());
1816// fs.insert_tree(
1817// "/dir",
1818// json!({
1819// "a.rs": text.clone(),
1820// }),
1821// )
1822// .await;
1823
1824// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1825// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1826// let buffer = project
1827// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1828// .await
1829// .unwrap();
1830
1831// let mut fake_server = fake_servers.next().await.unwrap();
1832// let lsp_document_version = fake_server
1833// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1834// .await
1835// .text_document
1836// .version;
1837
1838// // Simulate editing the buffer after the language server computes some edits.
1839// buffer.update(cx, |buffer, cx| {
1840// buffer.edit(
1841// [(
1842// Point::new(0, 0)..Point::new(0, 0),
1843// "// above first function\n",
1844// )],
1845// None,
1846// cx,
1847// );
1848// buffer.edit(
1849// [(
1850// Point::new(2, 0)..Point::new(2, 0),
1851// " // inside first function\n",
1852// )],
1853// None,
1854// cx,
1855// );
1856// buffer.edit(
1857// [(
1858// Point::new(6, 4)..Point::new(6, 4),
1859// "// inside second function ",
1860// )],
1861// None,
1862// cx,
1863// );
1864
1865// assert_eq!(
1866// buffer.text(),
1867// "
1868// // above first function
1869// fn a() {
1870// // inside first function
1871// f1();
1872// }
1873// fn b() {
1874// // inside second function f2();
1875// }
1876// fn c() {
1877// f3();
1878// }
1879// "
1880// .unindent()
1881// );
1882// });
1883
1884// let edits = project
1885// .update(cx, |project, cx| {
1886// project.edits_from_lsp(
1887// &buffer,
1888// vec![
1889// // replace body of first function
1890// lsp2::TextEdit {
1891// range: lsp2::Range::new(
1892// lsp2::Position::new(0, 0),
1893// lsp2::Position::new(3, 0),
1894// ),
1895// new_text: "
1896// fn a() {
1897// f10();
1898// }
1899// "
1900// .unindent(),
1901// },
1902// // edit inside second function
1903// lsp2::TextEdit {
1904// range: lsp2::Range::new(
1905// lsp2::Position::new(4, 6),
1906// lsp2::Position::new(4, 6),
1907// ),
1908// new_text: "00".into(),
1909// },
1910// // edit inside third function via two distinct edits
1911// lsp2::TextEdit {
1912// range: lsp2::Range::new(
1913// lsp2::Position::new(7, 5),
1914// lsp2::Position::new(7, 5),
1915// ),
1916// new_text: "4000".into(),
1917// },
1918// lsp2::TextEdit {
1919// range: lsp2::Range::new(
1920// lsp2::Position::new(7, 5),
1921// lsp2::Position::new(7, 6),
1922// ),
1923// new_text: "".into(),
1924// },
1925// ],
1926// LanguageServerId(0),
1927// Some(lsp_document_version),
1928// cx,
1929// )
1930// })
1931// .await
1932// .unwrap();
1933
1934// buffer.update(cx, |buffer, cx| {
1935// for (range, new_text) in edits {
1936// buffer.edit([(range, new_text)], None, cx);
1937// }
1938// assert_eq!(
1939// buffer.text(),
1940// "
1941// // above first function
1942// fn a() {
1943// // inside first function
1944// f10();
1945// }
1946// fn b() {
1947// // inside second function f200();
1948// }
1949// fn c() {
1950// f4000();
1951// }
1952// "
1953// .unindent()
1954// );
1955// });
1956// }
1957
1958// #[gpui::test]
1959// async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1960// init_test(cx);
1961
1962// let text = "
1963// use a::b;
1964// use a::c;
1965
1966// fn f() {
1967// b();
1968// c();
1969// }
1970// "
1971// .unindent();
1972
1973// let fs = FakeFs::new(cx.background());
1974// fs.insert_tree(
1975// "/dir",
1976// json!({
1977// "a.rs": text.clone(),
1978// }),
1979// )
1980// .await;
1981
1982// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1983// let buffer = project
1984// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1985// .await
1986// .unwrap();
1987
1988// // Simulate the language server sending us a small edit in the form of a very large diff.
1989// // Rust-analyzer does this when performing a merge-imports code action.
1990// let edits = project
1991// .update(cx, |project, cx| {
1992// project.edits_from_lsp(
1993// &buffer,
1994// [
1995// // Replace the first use statement without editing the semicolon.
1996// lsp2::TextEdit {
1997// range: lsp2::Range::new(
1998// lsp2::Position::new(0, 4),
1999// lsp2::Position::new(0, 8),
2000// ),
2001// new_text: "a::{b, c}".into(),
2002// },
2003// // Reinsert the remainder of the file between the semicolon and the final
2004// // newline of the file.
2005// lsp2::TextEdit {
2006// range: lsp2::Range::new(
2007// lsp2::Position::new(0, 9),
2008// lsp2::Position::new(0, 9),
2009// ),
2010// new_text: "\n\n".into(),
2011// },
2012// lsp2::TextEdit {
2013// range: lsp2::Range::new(
2014// lsp2::Position::new(0, 9),
2015// lsp2::Position::new(0, 9),
2016// ),
2017// new_text: "
2018// fn f() {
2019// b();
2020// c();
2021// }"
2022// .unindent(),
2023// },
2024// // Delete everything after the first newline of the file.
2025// lsp2::TextEdit {
2026// range: lsp2::Range::new(
2027// lsp2::Position::new(1, 0),
2028// lsp2::Position::new(7, 0),
2029// ),
2030// new_text: "".into(),
2031// },
2032// ],
2033// LanguageServerId(0),
2034// None,
2035// cx,
2036// )
2037// })
2038// .await
2039// .unwrap();
2040
2041// buffer.update(cx, |buffer, cx| {
2042// let edits = edits
2043// .into_iter()
2044// .map(|(range, text)| {
2045// (
2046// range.start.to_point(buffer)..range.end.to_point(buffer),
2047// text,
2048// )
2049// })
2050// .collect::<Vec<_>>();
2051
2052// assert_eq!(
2053// edits,
2054// [
2055// (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2056// (Point::new(1, 0)..Point::new(2, 0), "".into())
2057// ]
2058// );
2059
2060// for (range, new_text) in edits {
2061// buffer.edit([(range, new_text)], None, cx);
2062// }
2063// assert_eq!(
2064// buffer.text(),
2065// "
2066// use a::{b, c};
2067
2068// fn f() {
2069// b();
2070// c();
2071// }
2072// "
2073// .unindent()
2074// );
2075// });
2076// }
2077
2078// #[gpui::test]
2079// async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2080// init_test(cx);
2081
2082// let text = "
2083// use a::b;
2084// use a::c;
2085
2086// fn f() {
2087// b();
2088// c();
2089// }
2090// "
2091// .unindent();
2092
2093// let fs = FakeFs::new(cx.background());
2094// fs.insert_tree(
2095// "/dir",
2096// json!({
2097// "a.rs": text.clone(),
2098// }),
2099// )
2100// .await;
2101
2102// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2103// let buffer = project
2104// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2105// .await
2106// .unwrap();
2107
// // Simulate the language server sending us edits out of order,
// // with ranges sometimes being inverted or pointing to invalid locations.
2110// let edits = project
2111// .update(cx, |project, cx| {
2112// project.edits_from_lsp(
2113// &buffer,
2114// [
2115// lsp2::TextEdit {
2116// range: lsp2::Range::new(
2117// lsp2::Position::new(0, 9),
2118// lsp2::Position::new(0, 9),
2119// ),
2120// new_text: "\n\n".into(),
2121// },
2122// lsp2::TextEdit {
2123// range: lsp2::Range::new(
2124// lsp2::Position::new(0, 8),
2125// lsp2::Position::new(0, 4),
2126// ),
2127// new_text: "a::{b, c}".into(),
2128// },
2129// lsp2::TextEdit {
2130// range: lsp2::Range::new(
2131// lsp2::Position::new(1, 0),
2132// lsp2::Position::new(99, 0),
2133// ),
2134// new_text: "".into(),
2135// },
2136// lsp2::TextEdit {
2137// range: lsp2::Range::new(
2138// lsp2::Position::new(0, 9),
2139// lsp2::Position::new(0, 9),
2140// ),
2141// new_text: "
2142// fn f() {
2143// b();
2144// c();
2145// }"
2146// .unindent(),
2147// },
2148// ],
2149// LanguageServerId(0),
2150// None,
2151// cx,
2152// )
2153// })
2154// .await
2155// .unwrap();
2156
2157// buffer.update(cx, |buffer, cx| {
2158// let edits = edits
2159// .into_iter()
2160// .map(|(range, text)| {
2161// (
2162// range.start.to_point(buffer)..range.end.to_point(buffer),
2163// text,
2164// )
2165// })
2166// .collect::<Vec<_>>();
2167
2168// assert_eq!(
2169// edits,
2170// [
2171// (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2172// (Point::new(1, 0)..Point::new(2, 0), "".into())
2173// ]
2174// );
2175
2176// for (range, new_text) in edits {
2177// buffer.edit([(range, new_text)], None, cx);
2178// }
2179// assert_eq!(
2180// buffer.text(),
2181// "
2182// use a::{b, c};
2183
2184// fn f() {
2185// b();
2186// c();
2187// }
2188// "
2189// .unindent()
2190// );
2191// });
2192// }
2193
2194// fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2195// buffer: &Buffer,
2196// range: Range<T>,
2197// ) -> Vec<(String, Option<DiagnosticSeverity>)> {
2198// let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2199// for chunk in buffer.snapshot().chunks(range, true) {
2200// if chunks.last().map_or(false, |prev_chunk| {
2201// prev_chunk.1 == chunk.diagnostic_severity
2202// }) {
2203// chunks.last_mut().unwrap().0.push_str(chunk.text);
2204// } else {
2205// chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2206// }
2207// }
2208// chunks
2209// }
2210
2211// #[gpui::test(iterations = 10)]
2212// async fn test_definition(cx: &mut gpui::TestAppContext) {
2213// init_test(cx);
2214
2215// let mut language = Language::new(
2216// LanguageConfig {
2217// name: "Rust".into(),
2218// path_suffixes: vec!["rs".to_string()],
2219// ..Default::default()
2220// },
2221// Some(tree_sitter_rust::language()),
2222// );
2223// let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2224
2225// let fs = FakeFs::new(cx.background());
2226// fs.insert_tree(
2227// "/dir",
2228// json!({
2229// "a.rs": "const fn a() { A }",
2230// "b.rs": "const y: i32 = crate::a()",
2231// }),
2232// )
2233// .await;
2234
2235// let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2236// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2237
2238// let buffer = project
2239// .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2240// .await
2241// .unwrap();
2242
2243// let fake_server = fake_servers.next().await.unwrap();
2244// fake_server.handle_request::<lsp2::request::GotoDefinition, _, _>(|params, _| async move {
2245// let params = params.text_document_position_params;
2246// assert_eq!(
2247// params.text_document.uri.to_file_path().unwrap(),
2248// Path::new("/dir/b.rs"),
2249// );
2250// assert_eq!(params.position, lsp2::Position::new(0, 22));
2251
2252// Ok(Some(lsp2::GotoDefinitionResponse::Scalar(
2253// lsp2::Location::new(
2254// lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
2255// lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
2256// ),
2257// )))
2258// });
2259
2260// let mut definitions = project
2261// .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2262// .await
2263// .unwrap();
2264
2265// // Assert no new language server started
2266// cx.foreground().run_until_parked();
2267// assert!(fake_servers.try_next().is_err());
2268
2269// assert_eq!(definitions.len(), 1);
2270// let definition = definitions.pop().unwrap();
2271// cx.update(|cx| {
2272// let target_buffer = definition.target.buffer.read(cx);
2273// assert_eq!(
2274// target_buffer
2275// .file()
2276// .unwrap()
2277// .as_local()
2278// .unwrap()
2279// .abs_path(cx),
2280// Path::new("/dir/a.rs"),
2281// );
2282// assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2283// assert_eq!(
2284// list_worktrees(&project, cx),
2285// [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2286// );
2287
2288// drop(definition);
2289// });
2290// cx.read(|cx| {
2291// assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2292// });
2293
2294// fn list_worktrees<'a>(
2295// project: &'a ModelHandle<Project>,
2296// cx: &'a AppContext,
2297// ) -> Vec<(&'a Path, bool)> {
2298// project
2299// .read(cx)
2300// .worktrees(cx)
2301// .map(|worktree| {
2302// let worktree = worktree.read(cx);
2303// (
2304// worktree.as_local().unwrap().abs_path().as_ref(),
2305// worktree.is_visible(),
2306// )
2307// })
2308// .collect::<Vec<_>>()
2309// }
2310// }
2311
2312// #[gpui::test]
2313// async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2314// init_test(cx);
2315
2316// let mut language = Language::new(
2317// LanguageConfig {
2318// name: "TypeScript".into(),
2319// path_suffixes: vec!["ts".to_string()],
2320// ..Default::default()
2321// },
2322// Some(tree_sitter_typescript::language_typescript()),
2323// );
2324// let mut fake_language_servers = language
2325// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
// capabilities: lsp2::ServerCapabilities {
// completion_provider: Some(lsp2::CompletionOptions {
2328// trigger_characters: Some(vec![":".to_string()]),
2329// ..Default::default()
2330// }),
2331// ..Default::default()
2332// },
2333// ..Default::default()
2334// }))
2335// .await;
2336
2337// let fs = FakeFs::new(cx.background());
2338// fs.insert_tree(
2339// "/dir",
2340// json!({
2341// "a.ts": "",
2342// }),
2343// )
2344// .await;
2345
2346// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2347// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2348// let buffer = project
2349// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2350// .await
2351// .unwrap();
2352
2353// let fake_server = fake_language_servers.next().await.unwrap();
2354
2355// let text = "let a = b.fqn";
2356// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2357// let completions = project.update(cx, |project, cx| {
2358// project.completions(&buffer, text.len(), cx)
2359// });
2360
2361// fake_server
2362// .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
2363// Ok(Some(lsp2::CompletionResponse::Array(vec![
2364// lsp2::CompletionItem {
2365// label: "fullyQualifiedName?".into(),
2366// insert_text: Some("fullyQualifiedName".into()),
2367// ..Default::default()
2368// },
2369// ])))
2370// })
2371// .next()
2372// .await;
2373// let completions = completions.await.unwrap();
2374// let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2375// assert_eq!(completions.len(), 1);
2376// assert_eq!(completions[0].new_text, "fullyQualifiedName");
2377// assert_eq!(
2378// completions[0].old_range.to_offset(&snapshot),
2379// text.len() - 3..text.len()
2380// );
2381
2382// let text = "let a = \"atoms/cmp\"";
2383// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2384// let completions = project.update(cx, |project, cx| {
2385// project.completions(&buffer, text.len() - 1, cx)
2386// });
2387
2388// fake_server
2389// .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
2390// Ok(Some(lsp2::CompletionResponse::Array(vec![
2391// lsp2::CompletionItem {
2392// label: "component".into(),
2393// ..Default::default()
2394// },
2395// ])))
2396// })
2397// .next()
2398// .await;
2399// let completions = completions.await.unwrap();
2400// let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2401// assert_eq!(completions.len(), 1);
2402// assert_eq!(completions[0].new_text, "component");
2403// assert_eq!(
2404// completions[0].old_range.to_offset(&snapshot),
2405// text.len() - 4..text.len() - 1
2406// );
2407// }
2408
2409// #[gpui::test]
2410// async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2411// init_test(cx);
2412
2413// let mut language = Language::new(
2414// LanguageConfig {
2415// name: "TypeScript".into(),
2416// path_suffixes: vec!["ts".to_string()],
2417// ..Default::default()
2418// },
2419// Some(tree_sitter_typescript::language_typescript()),
2420// );
2421// let mut fake_language_servers = language
2422// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
// capabilities: lsp2::ServerCapabilities {
// completion_provider: Some(lsp2::CompletionOptions {
2425// trigger_characters: Some(vec![":".to_string()]),
2426// ..Default::default()
2427// }),
2428// ..Default::default()
2429// },
2430// ..Default::default()
2431// }))
2432// .await;
2433
2434// let fs = FakeFs::new(cx.background());
2435// fs.insert_tree(
2436// "/dir",
2437// json!({
2438// "a.ts": "",
2439// }),
2440// )
2441// .await;
2442
2443// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2444// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2445// let buffer = project
2446// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2447// .await
2448// .unwrap();
2449
2450// let fake_server = fake_language_servers.next().await.unwrap();
2451
2452// let text = "let a = b.fqn";
2453// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2454// let completions = project.update(cx, |project, cx| {
2455// project.completions(&buffer, text.len(), cx)
2456// });
2457
2458// fake_server
2459// .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
2460// Ok(Some(lsp2::CompletionResponse::Array(vec![
2461// lsp2::CompletionItem {
2462// label: "fullyQualifiedName?".into(),
2463// insert_text: Some("fully\rQualified\r\nName".into()),
2464// ..Default::default()
2465// },
2466// ])))
2467// })
2468// .next()
2469// .await;
2470// let completions = completions.await.unwrap();
2471// assert_eq!(completions.len(), 1);
2472// assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2473// }
2474
2475// #[gpui::test(iterations = 10)]
2476// async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2477// init_test(cx);
2478
2479// let mut language = Language::new(
2480// LanguageConfig {
2481// name: "TypeScript".into(),
2482// path_suffixes: vec!["ts".to_string()],
2483// ..Default::default()
2484// },
2485// None,
2486// );
2487// let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2488
2489// let fs = FakeFs::new(cx.background());
2490// fs.insert_tree(
2491// "/dir",
2492// json!({
2493// "a.ts": "a",
2494// }),
2495// )
2496// .await;
2497
2498// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2499// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2500// let buffer = project
2501// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2502// .await
2503// .unwrap();
2504
2505// let fake_server = fake_language_servers.next().await.unwrap();
2506
2507// // Language server returns code actions that contain commands, and not edits.
2508// let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2509// fake_server
2510// .handle_request::<lsp2::request::CodeActionRequest, _, _>(|_, _| async move {
2511// Ok(Some(vec![
2512// lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
2513// title: "The code action".into(),
// command: Some(lsp2::Command {
2515// title: "The command".into(),
2516// command: "_the/command".into(),
2517// arguments: Some(vec![json!("the-argument")]),
2518// }),
2519// ..Default::default()
2520// }),
2521// lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
2522// title: "two".into(),
2523// ..Default::default()
2524// }),
2525// ]))
2526// })
2527// .next()
2528// .await;
2529
2530// let action = actions.await.unwrap()[0].clone();
2531// let apply = project.update(cx, |project, cx| {
2532// project.apply_code_action(buffer.clone(), action, true, cx)
2533// });
2534
2535// // Resolving the code action does not populate its edits. In absence of
2536// // edits, we must execute the given command.
2537// fake_server.handle_request::<lsp2::request::CodeActionResolveRequest, _, _>(
2538// |action, _| async move { Ok(action) },
2539// );
2540
2541// // While executing the command, the language server sends the editor
2542// // a `workspaceEdit` request.
2543// fake_server
2544// .handle_request::<lsp2::request::ExecuteCommand, _, _>({
2545// let fake = fake_server.clone();
2546// move |params, _| {
2547// assert_eq!(params.command, "_the/command");
2548// let fake = fake.clone();
2549// async move {
2550// fake.server
2551// .request::<lsp2::request::ApplyWorkspaceEdit>(
2552// lsp2::ApplyWorkspaceEditParams {
2553// label: None,
// edit: lsp2::WorkspaceEdit {
2555// changes: Some(
2556// [(
2557// lsp2::Url::from_file_path("/dir/a.ts").unwrap(),
2558// vec![lsp2::TextEdit {
2559// range: lsp2::Range::new(
2560// lsp2::Position::new(0, 0),
2561// lsp2::Position::new(0, 0),
2562// ),
2563// new_text: "X".into(),
2564// }],
2565// )]
2566// .into_iter()
2567// .collect(),
2568// ),
2569// ..Default::default()
2570// },
2571// },
2572// )
2573// .await
2574// .unwrap();
2575// Ok(Some(json!(null)))
2576// }
2577// }
2578// })
2579// .next()
2580// .await;
2581
2582// // Applying the code action returns a project transaction containing the edits
2583// // sent by the language server in its `workspaceEdit` request.
2584// let transaction = apply.await.unwrap();
2585// assert!(transaction.0.contains_key(&buffer));
2586// buffer.update(cx, |buffer, cx| {
2587// assert_eq!(buffer.text(), "Xa");
2588// buffer.undo(cx);
2589// assert_eq!(buffer.text(), "a");
2590// });
2591// }
2592
2593// #[gpui::test(iterations = 10)]
2594// async fn test_save_file(cx: &mut gpui::TestAppContext) {
2595// init_test(cx);
2596
2597// let fs = FakeFs::new(cx.background());
2598// fs.insert_tree(
2599// "/dir",
2600// json!({
2601// "file1": "the old contents",
2602// }),
2603// )
2604// .await;
2605
2606// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2607// let buffer = project
2608// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2609// .await
2610// .unwrap();
2611// buffer.update(cx, |buffer, cx| {
2612// assert_eq!(buffer.text(), "the old contents");
2613// buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2614// });
2615
2616// project
2617// .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2618// .await
2619// .unwrap();
2620
2621// let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2622// assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2623// }
2624
2625// #[gpui::test]
2626// async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2627// init_test(cx);
2628
2629// let fs = FakeFs::new(cx.background());
2630// fs.insert_tree(
2631// "/dir",
2632// json!({
2633// "file1": "the old contents",
2634// }),
2635// )
2636// .await;
2637
2638// let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2639// let buffer = project
2640// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2641// .await
2642// .unwrap();
2643// buffer.update(cx, |buffer, cx| {
2644// buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2645// });
2646
2647// project
2648// .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2649// .await
2650// .unwrap();
2651
2652// let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2653// assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2654// }
2655
2656// #[gpui::test]
2657// async fn test_save_as(cx: &mut gpui::TestAppContext) {
2658// init_test(cx);
2659
2660// let fs = FakeFs::new(cx.background());
2661// fs.insert_tree("/dir", json!({})).await;
2662
2663// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2664
2665// let languages = project.read_with(cx, |project, _| project.languages().clone());
2666// languages.register(
2667// "/some/path",
2668// LanguageConfig {
2669// name: "Rust".into(),
2670// path_suffixes: vec!["rs".into()],
2671// ..Default::default()
2672// },
2673// tree_sitter_rust::language(),
2674// vec![],
2675// |_| Default::default(),
2676// );
2677
2678// let buffer = project.update(cx, |project, cx| {
2679// project.create_buffer("", None, cx).unwrap()
2680// });
2681// buffer.update(cx, |buffer, cx| {
2682// buffer.edit([(0..0, "abc")], None, cx);
2683// assert!(buffer.is_dirty());
2684// assert!(!buffer.has_conflict());
2685// assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2686// });
2687// project
2688// .update(cx, |project, cx| {
2689// project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2690// })
2691// .await
2692// .unwrap();
2693// assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2694
2695// cx.foreground().run_until_parked();
2696// buffer.read_with(cx, |buffer, cx| {
2697// assert_eq!(
2698// buffer.file().unwrap().full_path(cx),
2699// Path::new("dir/file1.rs")
2700// );
2701// assert!(!buffer.is_dirty());
2702// assert!(!buffer.has_conflict());
2703// assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2704// });
2705
2706// let opened_buffer = project
2707// .update(cx, |project, cx| {
2708// project.open_local_buffer("/dir/file1.rs", cx)
2709// })
2710// .await
2711// .unwrap();
2712// assert_eq!(opened_buffer, buffer);
2713// }
2714
2715// #[gpui::test(retries = 5)]
2716// async fn test_rescan_and_remote_updates(
2717// deterministic: Arc<Deterministic>,
2718// cx: &mut gpui::TestAppContext,
2719// ) {
2720// init_test(cx);
2721// cx.foreground().allow_parking();
2722
2723// let dir = temp_tree(json!({
2724// "a": {
2725// "file1": "",
2726// "file2": "",
2727// "file3": "",
2728// },
2729// "b": {
2730// "c": {
2731// "file4": "",
2732// "file5": "",
2733// }
2734// }
2735// }));
2736
2737// let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2738// let rpc = project.read_with(cx, |p, _| p.client.clone());
2739
// let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2741// let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2742// async move { buffer.await.unwrap() }
2743// };
// let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2745// project.read_with(cx, |project, cx| {
2746// let tree = project.worktrees(cx).next().unwrap();
2747// tree.read(cx)
2748// .entry_for_path(path)
2749// .unwrap_or_else(|| panic!("no entry for path {}", path))
2750// .id
2751// })
2752// };
2753
2754// let buffer2 = buffer_for_path("a/file2", cx).await;
2755// let buffer3 = buffer_for_path("a/file3", cx).await;
2756// let buffer4 = buffer_for_path("b/c/file4", cx).await;
2757// let buffer5 = buffer_for_path("b/c/file5", cx).await;
2758
2759// let file2_id = id_for_path("a/file2", cx);
2760// let file3_id = id_for_path("a/file3", cx);
2761// let file4_id = id_for_path("b/c/file4", cx);
2762
2763// // Create a remote copy of this worktree.
2764// let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2765
2766// let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2767
2768// let updates = Arc::new(Mutex::new(Vec::new()));
2769// tree.update(cx, |tree, cx| {
2770// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2771// let updates = updates.clone();
2772// move |update| {
2773// updates.lock().push(update);
2774// async { true }
2775// }
2776// });
2777// });
2778
2779// let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2780// deterministic.run_until_parked();
2781
2782// cx.read(|cx| {
2783// assert!(!buffer2.read(cx).is_dirty());
2784// assert!(!buffer3.read(cx).is_dirty());
2785// assert!(!buffer4.read(cx).is_dirty());
2786// assert!(!buffer5.read(cx).is_dirty());
2787// });
2788
2789// // Rename and delete files and directories.
2790// tree.flush_fs_events(cx).await;
2791// std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2792// std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2793// std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2794// std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2795// tree.flush_fs_events(cx).await;
2796
2797// let expected_paths = vec![
2798// "a",
2799// "a/file1",
2800// "a/file2.new",
2801// "b",
2802// "d",
2803// "d/file3",
2804// "d/file4",
2805// ];
2806
2807// cx.read(|app| {
2808// assert_eq!(
2809// tree.read(app)
2810// .paths()
2811// .map(|p| p.to_str().unwrap())
2812// .collect::<Vec<_>>(),
2813// expected_paths
2814// );
2815
2816// assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2817// assert_eq!(id_for_path("d/file3", cx), file3_id);
2818// assert_eq!(id_for_path("d/file4", cx), file4_id);
2819
2820// assert_eq!(
2821// buffer2.read(app).file().unwrap().path().as_ref(),
2822// Path::new("a/file2.new")
2823// );
2824// assert_eq!(
2825// buffer3.read(app).file().unwrap().path().as_ref(),
2826// Path::new("d/file3")
2827// );
2828// assert_eq!(
2829// buffer4.read(app).file().unwrap().path().as_ref(),
2830// Path::new("d/file4")
2831// );
2832// assert_eq!(
2833// buffer5.read(app).file().unwrap().path().as_ref(),
2834// Path::new("b/c/file5")
2835// );
2836
2837// assert!(!buffer2.read(app).file().unwrap().is_deleted());
2838// assert!(!buffer3.read(app).file().unwrap().is_deleted());
2839// assert!(!buffer4.read(app).file().unwrap().is_deleted());
2840// assert!(buffer5.read(app).file().unwrap().is_deleted());
2841// });
2842
2843// // Update the remote worktree. Check that it becomes consistent with the
2844// // local worktree.
2845// deterministic.run_until_parked();
2846// remote.update(cx, |remote, _| {
2847// for update in updates.lock().drain(..) {
2848// remote.as_remote_mut().unwrap().update_from_remote(update);
2849// }
2850// });
2851// deterministic.run_until_parked();
2852// remote.read_with(cx, |remote, _| {
2853// assert_eq!(
2854// remote
2855// .paths()
2856// .map(|p| p.to_str().unwrap())
2857// .collect::<Vec<_>>(),
2858// expected_paths
2859// );
2860// });
2861// }
2862
2863// #[gpui::test(iterations = 10)]
2864// async fn test_buffer_identity_across_renames(
2865// deterministic: Arc<Deterministic>,
2866// cx: &mut gpui::TestAppContext,
2867// ) {
2868// init_test(cx);
2869
2870// let fs = FakeFs::new(cx.background());
2871// fs.insert_tree(
2872// "/dir",
2873// json!({
2874// "a": {
2875// "file1": "",
2876// }
2877// }),
2878// )
2879// .await;
2880
2881// let project = Project::test(fs, [Path::new("/dir")], cx).await;
2882// let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2883// let tree_id = tree.read_with(cx, |tree, _| tree.id());
2884
2885// let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2886// project.read_with(cx, |project, cx| {
2887// let tree = project.worktrees(cx).next().unwrap();
2888// tree.read(cx)
2889// .entry_for_path(path)
2890// .unwrap_or_else(|| panic!("no entry for path {}", path))
2891// .id
2892// })
2893// };
2894
2895// let dir_id = id_for_path("a", cx);
2896// let file_id = id_for_path("a/file1", cx);
2897// let buffer = project
2898// .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2899// .await
2900// .unwrap();
2901// buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2902
2903// project
2904// .update(cx, |project, cx| {
2905// project.rename_entry(dir_id, Path::new("b"), cx)
2906// })
2907// .unwrap()
2908// .await
2909// .unwrap();
2910// deterministic.run_until_parked();
2911// assert_eq!(id_for_path("b", cx), dir_id);
2912// assert_eq!(id_for_path("b/file1", cx), file_id);
2913// buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2914// }
2915
// #[gpui::test]
// async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2918// init_test(cx);
2919
2920// let fs = FakeFs::new(cx.background());
2921// fs.insert_tree(
2922// "/dir",
2923// json!({
2924// "a.txt": "a-contents",
2925// "b.txt": "b-contents",
2926// }),
2927// )
2928// .await;
2929
2930// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2931
2932// // Spawn multiple tasks to open paths, repeating some paths.
2933// let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2934// (
2935// p.open_local_buffer("/dir/a.txt", cx),
2936// p.open_local_buffer("/dir/b.txt", cx),
2937// p.open_local_buffer("/dir/a.txt", cx),
2938// )
2939// });
2940
2941// let buffer_a_1 = buffer_a_1.await.unwrap();
2942// let buffer_a_2 = buffer_a_2.await.unwrap();
2943// let buffer_b = buffer_b.await.unwrap();
2944// assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2945// assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2946
2947// // There is only one buffer per path.
2948// let buffer_a_id = buffer_a_1.id();
2949// assert_eq!(buffer_a_2.id(), buffer_a_id);
2950
2951// // Open the same path again while it is still open.
2952// drop(buffer_a_1);
2953// let buffer_a_3 = project
2954// .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2955// .await
2956// .unwrap();
2957
2958// // There's still only one buffer per path.
2959// assert_eq!(buffer_a_3.id(), buffer_a_id);
2960// }
2961
// #[gpui::test]
// async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2964// init_test(cx);
2965
2966// let fs = FakeFs::new(cx.background());
2967// fs.insert_tree(
2968// "/dir",
2969// json!({
2970// "file1": "abc",
2971// "file2": "def",
2972// "file3": "ghi",
2973// }),
2974// )
2975// .await;
2976
2977// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2978
2979// let buffer1 = project
2980// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2981// .await
2982// .unwrap();
2983// let events = Rc::new(RefCell::new(Vec::new()));
2984
2985// // initially, the buffer isn't dirty.
2986// buffer1.update(cx, |buffer, cx| {
2987// cx.subscribe(&buffer1, {
2988// let events = events.clone();
2989// move |_, _, event, _| match event {
2990// BufferEvent::Operation(_) => {}
2991// _ => events.borrow_mut().push(event.clone()),
2992// }
2993// })
2994// .detach();
2995
2996// assert!(!buffer.is_dirty());
2997// assert!(events.borrow().is_empty());
2998
2999// buffer.edit([(1..2, "")], None, cx);
3000// });
3001
3002// // after the first edit, the buffer is dirty, and emits a dirtied event.
3003// buffer1.update(cx, |buffer, cx| {
3004// assert!(buffer.text() == "ac");
3005// assert!(buffer.is_dirty());
3006// assert_eq!(
3007// *events.borrow(),
3008// &[language2::Event::Edited, language2::Event::DirtyChanged]
3009// );
3010// events.borrow_mut().clear();
3011// buffer.did_save(
3012// buffer.version(),
3013// buffer.as_rope().fingerprint(),
3014// buffer.file().unwrap().mtime(),
3015// cx,
3016// );
3017// });
3018
3019// // after saving, the buffer is not dirty, and emits a saved event.
3020// buffer1.update(cx, |buffer, cx| {
3021// assert!(!buffer.is_dirty());
3022// assert_eq!(*events.borrow(), &[language2::Event::Saved]);
3023// events.borrow_mut().clear();
3024
3025// buffer.edit([(1..1, "B")], None, cx);
3026// buffer.edit([(2..2, "D")], None, cx);
3027// });
3028
3029// // after editing again, the buffer is dirty, and emits another dirty event.
3030// buffer1.update(cx, |buffer, cx| {
3031// assert!(buffer.text() == "aBDc");
3032// assert!(buffer.is_dirty());
3033// assert_eq!(
3034// *events.borrow(),
3035// &[
3036// language2::Event::Edited,
3037// language2::Event::DirtyChanged,
3038// language2::Event::Edited,
3039// ],
3040// );
3041// events.borrow_mut().clear();
3042
3043// // After restoring the buffer to its previously-saved state,
3044// // the buffer is not considered dirty anymore.
3045// buffer.edit([(1..3, "")], None, cx);
3046// assert!(buffer.text() == "ac");
3047// assert!(!buffer.is_dirty());
3048// });
3049
3050// assert_eq!(
3051// *events.borrow(),
3052// &[language2::Event::Edited, language2::Event::DirtyChanged]
3053// );
3054
3055// // When a file is deleted, the buffer is considered dirty.
3056// let events = Rc::new(RefCell::new(Vec::new()));
3057// let buffer2 = project
3058// .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3059// .await
3060// .unwrap();
3061// buffer2.update(cx, |_, cx| {
3062// cx.subscribe(&buffer2, {
3063// let events = events.clone();
3064// move |_, _, event, _| events.borrow_mut().push(event.clone())
3065// })
3066// .detach();
3067// });
3068
3069// fs.remove_file("/dir/file2".as_ref(), Default::default())
3070// .await
3071// .unwrap();
3072// cx.foreground().run_until_parked();
3073// buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
3074// assert_eq!(
3075// *events.borrow(),
3076// &[
3077// language2::Event::DirtyChanged,
3078// language2::Event::FileHandleChanged
3079// ]
3080// );
3081
3082// // When a file is already dirty when deleted, we don't emit a Dirtied event.
3083// let events = Rc::new(RefCell::new(Vec::new()));
3084// let buffer3 = project
3085// .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3086// .await
3087// .unwrap();
3088// buffer3.update(cx, |_, cx| {
3089// cx.subscribe(&buffer3, {
3090// let events = events.clone();
3091// move |_, _, event, _| events.borrow_mut().push(event.clone())
3092// })
3093// .detach();
3094// });
3095
3096// buffer3.update(cx, |buffer, cx| {
3097// buffer.edit([(0..0, "x")], None, cx);
3098// });
3099// events.borrow_mut().clear();
3100// fs.remove_file("/dir/file3".as_ref(), Default::default())
3101// .await
3102// .unwrap();
3103// cx.foreground().run_until_parked();
3104// assert_eq!(*events.borrow(), &[language2::Event::FileHandleChanged]);
3105// cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3106// }
3107
3108// #[gpui::test]
3109// async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3110// init_test(cx);
3111
3112// let initial_contents = "aaa\nbbbbb\nc\n";
3113// let fs = FakeFs::new(cx.background());
3114// fs.insert_tree(
3115// "/dir",
3116// json!({
3117// "the-file": initial_contents,
3118// }),
3119// )
3120// .await;
3121// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3122// let buffer = project
3123// .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3124// .await
3125// .unwrap();
3126
3127// let anchors = (0..3)
3128// .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3129// .collect::<Vec<_>>();
3130
3131// // Change the file on disk, adding two new lines of text, and removing
3132// // one line.
3133// buffer.read_with(cx, |buffer, _| {
3134// assert!(!buffer.is_dirty());
3135// assert!(!buffer.has_conflict());
3136// });
3137// let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3138// fs.save(
3139// "/dir/the-file".as_ref(),
3140// &new_contents.into(),
3141// LineEnding::Unix,
3142// )
3143// .await
3144// .unwrap();
3145
3146// // Because the buffer was not modified, it is reloaded from disk. Its
3147// // contents are edited according to the diff between the old and new
3148// // file contents.
3149// cx.foreground().run_until_parked();
3150// buffer.update(cx, |buffer, _| {
3151// assert_eq!(buffer.text(), new_contents);
3152// assert!(!buffer.is_dirty());
3153// assert!(!buffer.has_conflict());
3154
3155// let anchor_positions = anchors
3156// .iter()
3157// .map(|anchor| anchor.to_point(&*buffer))
3158// .collect::<Vec<_>>();
3159// assert_eq!(
3160// anchor_positions,
3161// [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3162// );
3163// });
3164
3165// // Modify the buffer
3166// buffer.update(cx, |buffer, cx| {
3167// buffer.edit([(0..0, " ")], None, cx);
3168// assert!(buffer.is_dirty());
3169// assert!(!buffer.has_conflict());
3170// });
3171
3172// // Change the file on disk again, adding blank lines to the beginning.
3173// fs.save(
3174// "/dir/the-file".as_ref(),
3175// &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3176// LineEnding::Unix,
3177// )
3178// .await
3179// .unwrap();
3180
3181// // Because the buffer is modified, it doesn't reload from disk, but is
3182// // marked as having a conflict.
3183// cx.foreground().run_until_parked();
3184// buffer.read_with(cx, |buffer, _| {
3185// assert!(buffer.has_conflict());
3186// });
3187// }
3188
3189// #[gpui::test]
3190// async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3191// init_test(cx);
3192
3193// let fs = FakeFs::new(cx.background());
3194// fs.insert_tree(
3195// "/dir",
3196// json!({
3197// "file1": "a\nb\nc\n",
3198// "file2": "one\r\ntwo\r\nthree\r\n",
3199// }),
3200// )
3201// .await;
3202
3203// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3204// let buffer1 = project
3205// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3206// .await
3207// .unwrap();
3208// let buffer2 = project
3209// .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3210// .await
3211// .unwrap();
3212
3213// buffer1.read_with(cx, |buffer, _| {
3214// assert_eq!(buffer.text(), "a\nb\nc\n");
3215// assert_eq!(buffer.line_ending(), LineEnding::Unix);
3216// });
3217// buffer2.read_with(cx, |buffer, _| {
3218// assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3219// assert_eq!(buffer.line_ending(), LineEnding::Windows);
3220// });
3221
3222// // Change a file's line endings on disk from unix to windows. The buffer's
3223// // state updates correctly.
3224// fs.save(
3225// "/dir/file1".as_ref(),
3226// &"aaa\nb\nc\n".into(),
3227// LineEnding::Windows,
3228// )
3229// .await
3230// .unwrap();
3231// cx.foreground().run_until_parked();
3232// buffer1.read_with(cx, |buffer, _| {
3233// assert_eq!(buffer.text(), "aaa\nb\nc\n");
3234// assert_eq!(buffer.line_ending(), LineEnding::Windows);
3235// });
3236
3237// // Save a file with windows line endings. The file is written correctly.
3238// buffer2.update(cx, |buffer, cx| {
3239// buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3240// });
3241// project
3242// .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3243// .await
3244// .unwrap();
3245// assert_eq!(
3246// fs.load("/dir/file2".as_ref()).await.unwrap(),
3247// "one\r\ntwo\r\nthree\r\nfour\r\n",
3248// );
3249// }
3250
3251// #[gpui::test]
3252// async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3253// init_test(cx);
3254
3255// let fs = FakeFs::new(cx.background());
3256// fs.insert_tree(
3257// "/the-dir",
3258// json!({
3259// "a.rs": "
3260// fn foo(mut v: Vec<usize>) {
3261// for x in &v {
3262// v.push(1);
3263// }
3264// }
3265// "
3266// .unindent(),
3267// }),
3268// )
3269// .await;
3270
3271// let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3272// let buffer = project
3273// .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3274// .await
3275// .unwrap();
3276
3277// let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3278// let message = lsp2::PublishDiagnosticsParams {
3279// uri: buffer_uri.clone(),
3280// diagnostics: vec![
3281// lsp2::Diagnostic {
3282// range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
3283// severity: Some(DiagnosticSeverity::WARNING),
3284// message: "error 1".to_string(),
3285// related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3286// location: lsp2::Location {
3287// uri: buffer_uri.clone(),
3288// range: lsp2::Range::new(
3289// lsp2::Position::new(1, 8),
3290// lsp2::Position::new(1, 9),
3291// ),
3292// },
3293// message: "error 1 hint 1".to_string(),
3294// }]),
3295// ..Default::default()
3296// },
3297// lsp2::Diagnostic {
3298// range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
3299// severity: Some(DiagnosticSeverity::HINT),
3300// message: "error 1 hint 1".to_string(),
3301// related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3302// location: lsp2::Location {
3303// uri: buffer_uri.clone(),
3304// range: lsp2::Range::new(
3305// lsp2::Position::new(1, 8),
3306// lsp2::Position::new(1, 9),
3307// ),
3308// },
3309// message: "original diagnostic".to_string(),
3310// }]),
3311// ..Default::default()
3312// },
3313// lsp2::Diagnostic {
3314// range: lsp2::Range::new(lsp2::Position::new(2, 8), lsp2::Position::new(2, 17)),
3315// severity: Some(DiagnosticSeverity::ERROR),
3316// message: "error 2".to_string(),
3317// related_information: Some(vec![
3318// lsp2::DiagnosticRelatedInformation {
3319// location: lsp2::Location {
3320// uri: buffer_uri.clone(),
3321// range: lsp2::Range::new(
3322// lsp2::Position::new(1, 13),
3323// lsp2::Position::new(1, 15),
3324// ),
3325// },
3326// message: "error 2 hint 1".to_string(),
3327// },
3328// lsp2::DiagnosticRelatedInformation {
3329// location: lsp2::Location {
3330// uri: buffer_uri.clone(),
3331// range: lsp2::Range::new(
3332// lsp2::Position::new(1, 13),
3333// lsp2::Position::new(1, 15),
3334// ),
3335// },
3336// message: "error 2 hint 2".to_string(),
3337// },
3338// ]),
3339// ..Default::default()
3340// },
3341// lsp2::Diagnostic {
3342// range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
3343// severity: Some(DiagnosticSeverity::HINT),
3344// message: "error 2 hint 1".to_string(),
3345// related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3346// location: lsp2::Location {
3347// uri: buffer_uri.clone(),
3348// range: lsp2::Range::new(
3349// lsp2::Position::new(2, 8),
3350// lsp2::Position::new(2, 17),
3351// ),
3352// },
3353// message: "original diagnostic".to_string(),
3354// }]),
3355// ..Default::default()
3356// },
3357// lsp2::Diagnostic {
3358// range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
3359// severity: Some(DiagnosticSeverity::HINT),
3360// message: "error 2 hint 2".to_string(),
3361// related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3362// location: lsp2::Location {
3363// uri: buffer_uri,
3364// range: lsp2::Range::new(
3365// lsp2::Position::new(2, 8),
3366// lsp2::Position::new(2, 17),
3367// ),
3368// },
3369// message: "original diagnostic".to_string(),
3370// }]),
3371// ..Default::default()
3372// },
3373// ],
3374// version: None,
3375// };
3376
3377// project
3378// .update(cx, |p, cx| {
3379// p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3380// })
3381// .unwrap();
3382// let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3383
3384// assert_eq!(
3385// buffer
3386// .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3387// .collect::<Vec<_>>(),
3388// &[
3389// DiagnosticEntry {
3390// range: Point::new(1, 8)..Point::new(1, 9),
3391// diagnostic: Diagnostic {
3392// severity: DiagnosticSeverity::WARNING,
3393// message: "error 1".to_string(),
3394// group_id: 1,
3395// is_primary: true,
3396// ..Default::default()
3397// }
3398// },
3399// DiagnosticEntry {
3400// range: Point::new(1, 8)..Point::new(1, 9),
3401// diagnostic: Diagnostic {
3402// severity: DiagnosticSeverity::HINT,
3403// message: "error 1 hint 1".to_string(),
3404// group_id: 1,
3405// is_primary: false,
3406// ..Default::default()
3407// }
3408// },
3409// DiagnosticEntry {
3410// range: Point::new(1, 13)..Point::new(1, 15),
3411// diagnostic: Diagnostic {
3412// severity: DiagnosticSeverity::HINT,
3413// message: "error 2 hint 1".to_string(),
3414// group_id: 0,
3415// is_primary: false,
3416// ..Default::default()
3417// }
3418// },
3419// DiagnosticEntry {
3420// range: Point::new(1, 13)..Point::new(1, 15),
3421// diagnostic: Diagnostic {
3422// severity: DiagnosticSeverity::HINT,
3423// message: "error 2 hint 2".to_string(),
3424// group_id: 0,
3425// is_primary: false,
3426// ..Default::default()
3427// }
3428// },
3429// DiagnosticEntry {
3430// range: Point::new(2, 8)..Point::new(2, 17),
3431// diagnostic: Diagnostic {
3432// severity: DiagnosticSeverity::ERROR,
3433// message: "error 2".to_string(),
3434// group_id: 0,
3435// is_primary: true,
3436// ..Default::default()
3437// }
3438// }
3439// ]
3440// );
3441
3442// assert_eq!(
3443// buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3444// &[
3445// DiagnosticEntry {
3446// range: Point::new(1, 13)..Point::new(1, 15),
3447// diagnostic: Diagnostic {
3448// severity: DiagnosticSeverity::HINT,
3449// message: "error 2 hint 1".to_string(),
3450// group_id: 0,
3451// is_primary: false,
3452// ..Default::default()
3453// }
3454// },
3455// DiagnosticEntry {
3456// range: Point::new(1, 13)..Point::new(1, 15),
3457// diagnostic: Diagnostic {
3458// severity: DiagnosticSeverity::HINT,
3459// message: "error 2 hint 2".to_string(),
3460// group_id: 0,
3461// is_primary: false,
3462// ..Default::default()
3463// }
3464// },
3465// DiagnosticEntry {
3466// range: Point::new(2, 8)..Point::new(2, 17),
3467// diagnostic: Diagnostic {
3468// severity: DiagnosticSeverity::ERROR,
3469// message: "error 2".to_string(),
3470// group_id: 0,
3471// is_primary: true,
3472// ..Default::default()
3473// }
3474// }
3475// ]
3476// );
3477
3478// assert_eq!(
3479// buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3480// &[
3481// DiagnosticEntry {
3482// range: Point::new(1, 8)..Point::new(1, 9),
3483// diagnostic: Diagnostic {
3484// severity: DiagnosticSeverity::WARNING,
3485// message: "error 1".to_string(),
3486// group_id: 1,
3487// is_primary: true,
3488// ..Default::default()
3489// }
3490// },
3491// DiagnosticEntry {
3492// range: Point::new(1, 8)..Point::new(1, 9),
3493// diagnostic: Diagnostic {
3494// severity: DiagnosticSeverity::HINT,
3495// message: "error 1 hint 1".to_string(),
3496// group_id: 1,
3497// is_primary: false,
3498// ..Default::default()
3499// }
3500// },
3501// ]
3502// );
3503// }
3504
3505// #[gpui::test]
3506// async fn test_rename(cx: &mut gpui::TestAppContext) {
3507// init_test(cx);
3508
3509// let mut language = Language::new(
3510// LanguageConfig {
3511// name: "Rust".into(),
3512// path_suffixes: vec!["rs".to_string()],
3513// ..Default::default()
3514// },
3515// Some(tree_sitter_rust::language()),
3516// );
3517// let mut fake_servers = language
3518// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3519// capabilities: lsp2::ServerCapabilities {
3520// rename_provider: Some(lsp2::OneOf::Right(lsp2::RenameOptions {
3521// prepare_provider: Some(true),
3522// work_done_progress_options: Default::default(),
3523// })),
3524// ..Default::default()
3525// },
3526// ..Default::default()
3527// }))
3528// .await;
3529
3530// let fs = FakeFs::new(cx.background());
3531// fs.insert_tree(
3532// "/dir",
3533// json!({
3534// "one.rs": "const ONE: usize = 1;",
3535// "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3536// }),
3537// )
3538// .await;
3539
3540// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3541// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3542// let buffer = project
3543// .update(cx, |project, cx| {
3544// project.open_local_buffer("/dir/one.rs", cx)
3545// })
3546// .await
3547// .unwrap();
3548
3549// let fake_server = fake_servers.next().await.unwrap();
3550
3551// let response = project.update(cx, |project, cx| {
3552// project.prepare_rename(buffer.clone(), 7, cx)
3553// });
3554// fake_server
3555// .handle_request::<lsp2::request::PrepareRenameRequest, _, _>(|params, _| async move {
3556// assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3557// assert_eq!(params.position, lsp2::Position::new(0, 7));
3558// Ok(Some(lsp2::PrepareRenameResponse::Range(lsp2::Range::new(
3559// lsp2::Position::new(0, 6),
3560// lsp2::Position::new(0, 9),
3561// ))))
3562// })
3563// .next()
3564// .await
3565// .unwrap();
3566// let range = response.await.unwrap().unwrap();
3567// let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3568// assert_eq!(range, 6..9);
3569
3570// let response = project.update(cx, |project, cx| {
3571// project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3572// });
3573// fake_server
3574// .handle_request::<lsp2::request::Rename, _, _>(|params, _| async move {
3575// assert_eq!(
3576// params.text_document_position.text_document.uri.as_str(),
3577// "file:///dir/one.rs"
3578// );
3579// assert_eq!(
3580// params.text_document_position.position,
3581// lsp2::Position::new(0, 7)
3582// );
3583// assert_eq!(params.new_name, "THREE");
3584// Ok(Some(lsp2::WorkspaceEdit {
3585// changes: Some(
3586// [
3587// (
3588// lsp2::Url::from_file_path("/dir/one.rs").unwrap(),
3589// vec![lsp2::TextEdit::new(
3590// lsp2::Range::new(
3591// lsp2::Position::new(0, 6),
3592// lsp2::Position::new(0, 9),
3593// ),
3594// "THREE".to_string(),
3595// )],
3596// ),
3597// (
3598// lsp2::Url::from_file_path("/dir/two.rs").unwrap(),
3599// vec![
3600// lsp2::TextEdit::new(
3601// lsp2::Range::new(
3602// lsp2::Position::new(0, 24),
3603// lsp2::Position::new(0, 27),
3604// ),
3605// "THREE".to_string(),
3606// ),
3607// lsp2::TextEdit::new(
3608// lsp2::Range::new(
3609// lsp2::Position::new(0, 35),
3610// lsp2::Position::new(0, 38),
3611// ),
3612// "THREE".to_string(),
3613// ),
3614// ],
3615// ),
3616// ]
3617// .into_iter()
3618// .collect(),
3619// ),
3620// ..Default::default()
3621// }))
3622// })
3623// .next()
3624// .await
3625// .unwrap();
3626// let mut transaction = response.await.unwrap().0;
3627// assert_eq!(transaction.len(), 2);
3628// assert_eq!(
3629// transaction
3630// .remove_entry(&buffer)
3631// .unwrap()
3632// .0
3633// .read_with(cx, |buffer, _| buffer.text()),
3634// "const THREE: usize = 1;"
3635// );
3636// assert_eq!(
3637// transaction
3638// .into_keys()
3639// .next()
3640// .unwrap()
3641// .read_with(cx, |buffer, _| buffer.text()),
3642// "const TWO: usize = one::THREE + one::THREE;"
3643// );
3644// }
3645
3646// #[gpui::test]
3647// async fn test_search(cx: &mut gpui::TestAppContext) {
3648// init_test(cx);
3649
3650// let fs = FakeFs::new(cx.background());
3651// fs.insert_tree(
3652// "/dir",
3653// json!({
3654// "one.rs": "const ONE: usize = 1;",
3655// "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3656// "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3657// "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3658// }),
3659// )
3660// .await;
3661// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3662// assert_eq!(
3663// search(
3664// &project,
3665// SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3666// cx
3667// )
3668// .await
3669// .unwrap(),
3670// HashMap::from_iter([
3671// ("two.rs".to_string(), vec![6..9]),
3672// ("three.rs".to_string(), vec![37..40])
3673// ])
3674// );
3675
3676// let buffer_4 = project
3677// .update(cx, |project, cx| {
3678// project.open_local_buffer("/dir/four.rs", cx)
3679// })
3680// .await
3681// .unwrap();
3682// buffer_4.update(cx, |buffer, cx| {
3683// let text = "two::TWO";
3684// buffer.edit([(20..28, text), (31..43, text)], None, cx);
3685// });
3686
3687// assert_eq!(
3688// search(
3689// &project,
3690// SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3691// cx
3692// )
3693// .await
3694// .unwrap(),
3695// HashMap::from_iter([
3696// ("two.rs".to_string(), vec![6..9]),
3697// ("three.rs".to_string(), vec![37..40]),
3698// ("four.rs".to_string(), vec![25..28, 36..39])
3699// ])
3700// );
3701// }
3702
3703// #[gpui::test]
3704// async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3705// init_test(cx);
3706
3707// let search_query = "file";
3708
3709// let fs = FakeFs::new(cx.background());
3710// fs.insert_tree(
3711// "/dir",
3712// json!({
3713// "one.rs": r#"// Rust file one"#,
3714// "one.ts": r#"// TypeScript file one"#,
3715// "two.rs": r#"// Rust file two"#,
3716// "two.ts": r#"// TypeScript file two"#,
3717// }),
3718// )
3719// .await;
3720// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3721
3722// assert!(
3723// search(
3724// &project,
3725// SearchQuery::text(
3726// search_query,
3727// false,
3728// true,
3729// vec![PathMatcher::new("*.odd").unwrap()],
3730// Vec::new()
3731// )
3732// .unwrap(),
3733// cx
3734// )
3735// .await
3736// .unwrap()
3737// .is_empty(),
3738// "If no inclusions match, no files should be returned"
3739// );
3740
3741// assert_eq!(
3742// search(
3743// &project,
3744// SearchQuery::text(
3745// search_query,
3746// false,
3747// true,
3748// vec![PathMatcher::new("*.rs").unwrap()],
3749// Vec::new()
3750// )
3751// .unwrap(),
3752// cx
3753// )
3754// .await
3755// .unwrap(),
3756// HashMap::from_iter([
3757// ("one.rs".to_string(), vec![8..12]),
3758// ("two.rs".to_string(), vec![8..12]),
3759// ]),
3760// "Rust only search should give only Rust files"
3761// );
3762
3763// assert_eq!(
3764// search(
3765// &project,
3766// SearchQuery::text(
3767// search_query,
3768// false,
3769// true,
3770// vec![
3771// PathMatcher::new("*.ts").unwrap(),
3772// PathMatcher::new("*.odd").unwrap(),
3773// ],
3774// Vec::new()
3775// ).unwrap(),
3776// cx
3777// )
3778// .await
3779// .unwrap(),
3780// HashMap::from_iter([
3781// ("one.ts".to_string(), vec![14..18]),
3782// ("two.ts".to_string(), vec![14..18]),
3783// ]),
3784// "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3785// );
3786
3787// assert_eq!(
3788// search(
3789// &project,
3790// SearchQuery::text(
3791// search_query,
3792// false,
3793// true,
3794// vec![
3795// PathMatcher::new("*.rs").unwrap(),
3796// PathMatcher::new("*.ts").unwrap(),
3797// PathMatcher::new("*.odd").unwrap(),
3798// ],
3799// Vec::new()
3800// ).unwrap(),
3801// cx
3802// )
3803// .await
3804// .unwrap(),
3805// HashMap::from_iter([
3806// ("one.rs".to_string(), vec![8..12]),
3807// ("one.ts".to_string(), vec![14..18]),
3808// ("two.rs".to_string(), vec![8..12]),
3809// ("two.ts".to_string(), vec![14..18]),
3810// ]),
3811// "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3812// );
3813// }
3814
3815// #[gpui::test]
3816// async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3817// init_test(cx);
3818
3819// let search_query = "file";
3820
3821// let fs = FakeFs::new(cx.background());
3822// fs.insert_tree(
3823// "/dir",
3824// json!({
3825// "one.rs": r#"// Rust file one"#,
3826// "one.ts": r#"// TypeScript file one"#,
3827// "two.rs": r#"// Rust file two"#,
3828// "two.ts": r#"// TypeScript file two"#,
3829// }),
3830// )
3831// .await;
3832// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3833
3834// assert_eq!(
3835// search(
3836// &project,
3837// SearchQuery::text(
3838// search_query,
3839// false,
3840// true,
3841// Vec::new(),
3842// vec![PathMatcher::new("*.odd").unwrap()],
3843// )
3844// .unwrap(),
3845// cx
3846// )
3847// .await
3848// .unwrap(),
3849// HashMap::from_iter([
3850// ("one.rs".to_string(), vec![8..12]),
3851// ("one.ts".to_string(), vec![14..18]),
3852// ("two.rs".to_string(), vec![8..12]),
3853// ("two.ts".to_string(), vec![14..18]),
3854// ]),
3855// "If no exclusions match, all files should be returned"
3856// );
3857
3858// assert_eq!(
3859// search(
3860// &project,
3861// SearchQuery::text(
3862// search_query,
3863// false,
3864// true,
3865// Vec::new(),
3866// vec![PathMatcher::new("*.rs").unwrap()],
3867// )
3868// .unwrap(),
3869// cx
3870// )
3871// .await
3872// .unwrap(),
3873// HashMap::from_iter([
3874// ("one.ts".to_string(), vec![14..18]),
3875// ("two.ts".to_string(), vec![14..18]),
3876// ]),
3877// "Rust exclusion search should give only TypeScript files"
3878// );
3879
3880// assert_eq!(
3881// search(
3882// &project,
3883// SearchQuery::text(
3884// search_query,
3885// false,
3886// true,
3887// Vec::new(),
3888// vec![
3889// PathMatcher::new("*.ts").unwrap(),
3890// PathMatcher::new("*.odd").unwrap(),
3891// ],
3892// ).unwrap(),
3893// cx
3894// )
3895// .await
3896// .unwrap(),
3897// HashMap::from_iter([
3898// ("one.rs".to_string(), vec![8..12]),
3899// ("two.rs".to_string(), vec![8..12]),
3900// ]),
3901// "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3902// );
3903
3904// assert!(
3905// search(
3906// &project,
3907// SearchQuery::text(
3908// search_query,
3909// false,
3910// true,
3911// Vec::new(),
3912// vec![
3913// PathMatcher::new("*.rs").unwrap(),
3914// PathMatcher::new("*.ts").unwrap(),
3915// PathMatcher::new("*.odd").unwrap(),
3916// ],
3917// ).unwrap(),
3918// cx
3919// )
3920// .await
3921// .unwrap().is_empty(),
3922// "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
3923// );
3924// }
3925
3926// #[gpui::test]
3927// async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3928// init_test(cx);
3929
3930// let search_query = "file";
3931
3932// let fs = FakeFs::new(cx.background());
3933// fs.insert_tree(
3934// "/dir",
3935// json!({
3936// "one.rs": r#"// Rust file one"#,
3937// "one.ts": r#"// TypeScript file one"#,
3938// "two.rs": r#"// Rust file two"#,
3939// "two.ts": r#"// TypeScript file two"#,
3940// }),
3941// )
3942// .await;
3943// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3944
3945// assert!(
3946// search(
3947// &project,
3948// SearchQuery::text(
3949// search_query,
3950// false,
3951// true,
3952// vec![PathMatcher::new("*.odd").unwrap()],
3953// vec![PathMatcher::new("*.odd").unwrap()],
3954// )
3955// .unwrap(),
3956// cx
3957// )
3958// .await
3959// .unwrap()
3960// .is_empty(),
3961// "If both no exclusions and inclusions match, exclusions should win and return nothing"
3962// );
3963
3964// assert!(
3965// search(
3966// &project,
3967// SearchQuery::text(
3968// search_query,
3969// false,
3970// true,
3971// vec![PathMatcher::new("*.ts").unwrap()],
3972// vec![PathMatcher::new("*.ts").unwrap()],
3973// ).unwrap(),
3974// cx
3975// )
3976// .await
3977// .unwrap()
3978// .is_empty(),
3979// "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
3980// );
3981
3982// assert!(
3983// search(
3984// &project,
3985// SearchQuery::text(
3986// search_query,
3987// false,
3988// true,
3989// vec![
3990// PathMatcher::new("*.ts").unwrap(),
3991// PathMatcher::new("*.odd").unwrap()
3992// ],
3993// vec![
3994// PathMatcher::new("*.ts").unwrap(),
3995// PathMatcher::new("*.odd").unwrap()
3996// ],
3997// )
3998// .unwrap(),
3999// cx
4000// )
4001// .await
4002// .unwrap()
4003// .is_empty(),
4004// "Non-matching inclusions and exclusions should not change that."
4005// );
4006
4007// assert_eq!(
4008// search(
4009// &project,
4010// SearchQuery::text(
4011// search_query,
4012// false,
4013// true,
4014// vec![
4015// PathMatcher::new("*.ts").unwrap(),
4016// PathMatcher::new("*.odd").unwrap()
4017// ],
4018// vec![
4019// PathMatcher::new("*.rs").unwrap(),
4020// PathMatcher::new("*.odd").unwrap()
4021// ],
4022// )
4023// .unwrap(),
4024// cx
4025// )
4026// .await
4027// .unwrap(),
4028// HashMap::from_iter([
4029// ("one.ts".to_string(), vec![14..18]),
4030// ("two.ts".to_string(), vec![14..18]),
4031// ]),
4032// "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4033// );
4034// }
4035
4036// #[test]
4037// fn test_glob_literal_prefix() {
4038// assert_eq!(glob_literal_prefix("**/*.js"), "");
4039// assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4040// assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4041// assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4042// }
4043
4044// async fn search(
4045// project: &ModelHandle<Project>,
4046// query: SearchQuery,
4047// cx: &mut gpui::TestAppContext,
4048// ) -> Result<HashMap<String, Vec<Range<usize>>>> {
4049// let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4050// let mut result = HashMap::default();
4051// while let Some((buffer, range)) = search_rx.next().await {
4052// result.entry(buffer).or_insert(range);
4053// }
4054// Ok(result
4055// .into_iter()
4056// .map(|(buffer, ranges)| {
4057// buffer.read_with(cx, |buffer, _| {
4058// let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4059// let ranges = ranges
4060// .into_iter()
4061// .map(|range| range.to_offset(buffer))
4062// .collect::<Vec<_>>();
4063// (path, ranges)
4064// })
4065// })
4066// .collect())
4067// }
4068
4069// fn init_test(cx: &mut gpui::TestAppContext) {
4070// cx.foreground().forbid_parking();
4071
4072// cx.update(|cx| {
4073// cx.set_global(SettingsStore::test(cx));
4074// language2::init(cx);
4075// Project::init_settings(cx);
4076// });
4077// }