use anyhow::Result;
use gpui::App;
use schemars::{JsonSchema, Schema, transform::transform_subschemas};
use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value;
use std::{ops::Range, sync::LazyLock};
use tree_sitter::{Query, StreamingIterator as _};
use util::RangeExt;

/// Parameters that are used when generating some JSON schemas at runtime.
pub struct SettingsJsonSchemaParams<'a> {
    pub language_names: &'a [String],
    pub font_names: &'a [String],
}

/// A value registered via `inventory` that specifies how to generate a JSON schema at runtime.
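///
/// # Example
///
/// A sketch of how a schema might be registered. `FontFamilyName` and the use of
/// `schemars::json_schema!` here are illustrative assumptions, not part of this module.
///
/// ```ignore
/// inventory::submit! {
///     ParameterizedJsonSchema {
///         add_and_get_ref: |generator, params, _cx| {
///             replace_subschema::<FontFamilyName>(generator, || {
///                 schemars::json_schema!({ "enum": params.font_names })
///             })
///         }
///     }
/// }
/// ```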
pub struct ParameterizedJsonSchema {
    pub add_and_get_ref:
        fn(&mut schemars::SchemaGenerator, &SettingsJsonSchemaParams, &App) -> schemars::Schema,
}

inventory::collect!(ParameterizedJsonSchema);

const DEFS_PATH: &str = "#/$defs/";

/// Replaces the JSON schema definition for some type if it is in use (present in the definitions
/// list), and returns a reference to it.
///
/// This asserts that `JsonSchema::schema_name()` + `"2"` does not exist, because that would
/// indicate that multiple types use this name, and unfortunately the schemars API does not
/// support resolving this ambiguity - see https://github.com/GREsau/schemars/issues/449
///
/// `schema` is a closure because some settings types are not available on the remote server, and
/// building the schema eagerly would crash there when accessing e.g. `GlobalThemeRegistry`.
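///
/// # Example
///
/// A sketch with an assumed `ThemeName` type and a runtime list of theme names:
///
/// ```ignore
/// let reference = replace_subschema::<ThemeName>(generator, || {
///     schemars::json_schema!({ "type": "string", "enum": theme_names })
/// });
/// ```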
pub fn replace_subschema<T: JsonSchema>(
    generator: &mut schemars::SchemaGenerator,
    schema: impl Fn() -> schemars::Schema,
) -> schemars::Schema {
    // Fall back on just using the schema name, which could collide; the assert below catches the
    // case where schemars has disambiguated a collision by appending "2".
    let schema_name = T::schema_name();
    let definitions = generator.definitions_mut();
    assert!(!definitions.contains_key(&format!("{schema_name}2")));
    if definitions.contains_key(schema_name.as_ref()) {
        definitions.insert(schema_name.to_string(), schema().to_value());
    }
    Schema::new_ref(format!("{DEFS_PATH}{schema_name}"))
}

/// Adds a new JSON schema definition and returns a reference to it. **Panics** if the name is
/// already in use.
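///
/// A sketch with an assumed `generator` in scope and an illustrative definition name:
///
/// ```ignore
/// let reference = add_new_subschema(
///     generator,
///     "ExampleNames", // hypothetical definition name
///     serde_json::json!({ "type": "string" }),
/// );
/// ```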
pub fn add_new_subschema(
    generator: &mut schemars::SchemaGenerator,
    name: &str,
    schema: Value,
) -> Schema {
    let old_definition = generator.definitions_mut().insert(name.to_string(), schema);
    assert_eq!(old_definition, None);
    schemars::Schema::new_ref(format!("{DEFS_PATH}{name}"))
}

/// Defaults `additionalProperties` to `false`, as if `#[schemars(deny_unknown_fields)]` were on
/// every struct. Skips schemas that already set `additionalProperties` or `unevaluatedProperties`
/// (such as when `#[serde(flatten)]` is used on a map).
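///
/// A sketch of attaching the transform when building a generator; the schemars 1.x
/// `SchemaSettings::with_transform` API is assumed here:
///
/// ```ignore
/// let generator = schemars::generate::SchemaSettings::draft2019_09()
///     .with_transform(DefaultDenyUnknownFields)
///     .into_generator();
/// ```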
#[derive(Clone)]
pub struct DefaultDenyUnknownFields;

impl schemars::transform::Transform for DefaultDenyUnknownFields {
    fn transform(&mut self, schema: &mut schemars::Schema) {
        if let Some(object) = schema.as_object_mut() {
            if object.contains_key("properties")
                && !object.contains_key("additionalProperties")
                && !object.contains_key("unevaluatedProperties")
            {
                object.insert("additionalProperties".to_string(), false.into());
            }
        }
        transform_subschemas(self, schema);
    }
}
80
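/// Appends edits to `edits` (and applies them to `text`) that transform the JSON in `text` from
/// `old_value` to `new_value`, preserving comments and formatting of unchanged parts when both
/// values are objects. Keys listed in `preserved_keys` are rewritten even when unchanged.
/// `key_path` is used as scratch space during recursion and should normally start empty.
///
/// A sketch of intended usage with assumed inputs:
///
/// ```ignore
/// let mut text = r#"{ "a": 1, "b": 2 }"#.to_string();
/// let old = serde_json::json!({ "a": 1, "b": 2 });
/// let new = serde_json::json!({ "a": 1, "b": 3 });
/// let mut edits = Vec::new();
/// update_value_in_json_text(&mut text, &mut Vec::new(), 4, &old, &new, &[], &mut edits);
/// // `text` now holds the updated JSON; `edits` holds the (range, replacement) pairs applied.
/// ```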
pub fn update_value_in_json_text<'a>(
    text: &mut String,
    key_path: &mut Vec<&'a str>,
    tab_size: usize,
    old_value: &'a Value,
    new_value: &'a Value,
    preserved_keys: &[&str],
    edits: &mut Vec<(Range<usize>, String)>,
) {
90 // If the old and new values are both objects, then compare them key by key,
91 // preserving the comments and formatting of the unchanged parts. Otherwise,
92 // replace the old value with the new value.
93 if let (Value::Object(old_object), Value::Object(new_object)) = (old_value, new_value) {
94 for (key, old_sub_value) in old_object.iter() {
95 key_path.push(key);
96 if let Some(new_sub_value) = new_object.get(key) {
97 // Key exists in both old and new, recursively update
98 update_value_in_json_text(
99 text,
100 key_path,
101 tab_size,
102 old_sub_value,
103 new_sub_value,
104 preserved_keys,
105 edits,
106 );
107 } else {
108 // Key was removed from new object, remove the entire key-value pair
109 let (range, replacement) =
110 replace_value_in_json_text(text, key_path, 0, None, None);
111 text.replace_range(range.clone(), &replacement);
112 edits.push((range, replacement));
113 }
114 key_path.pop();
115 }
116 for (key, new_sub_value) in new_object.iter() {
117 key_path.push(key);
118 if !old_object.contains_key(key) {
119 update_value_in_json_text(
120 text,
121 key_path,
122 tab_size,
123 &Value::Null,
124 new_sub_value,
125 preserved_keys,
126 edits,
127 );
128 }
129 key_path.pop();
130 }
131 } else if key_path
132 .last()
133 .map_or(false, |key| preserved_keys.contains(key))
134 || old_value != new_value
135 {
136 let mut new_value = new_value.clone();
137 if let Some(new_object) = new_value.as_object_mut() {
138 new_object.retain(|_, v| !v.is_null());
139 }
140 let (range, replacement) =
141 replace_value_in_json_text(text, key_path, tab_size, Some(&new_value), None);
142 text.replace_range(range.clone(), &replacement);
143 edits.push((range, replacement));
144 }
145}
146
/// Computes a single text edit that replaces the value at `key_path` in `text`, inserting it if
/// missing, or removing the whole key-value pair when `new_value` is `None`.
///
/// * `replace_key` - When an exact key match for `key_path` is found, also replace the matched
///   key itself with `replace_key` if `Some`.
fn replace_value_in_json_text(
    text: &str,
    key_path: &[&str],
    tab_size: usize,
    new_value: Option<&Value>,
    replace_key: Option<&str>,
) -> (Range<usize>, String) {
155 static PAIR_QUERY: LazyLock<Query> = LazyLock::new(|| {
156 Query::new(
157 &tree_sitter_json::LANGUAGE.into(),
158 "(pair key: (string) @key value: (_) @value)",
159 )
160 .expect("Failed to create PAIR_QUERY")
161 });
162
163 let mut parser = tree_sitter::Parser::new();
164 parser
165 .set_language(&tree_sitter_json::LANGUAGE.into())
166 .unwrap();
167 let syntax_tree = parser.parse(text, None).unwrap();
168
169 let mut cursor = tree_sitter::QueryCursor::new();
170
171 let mut depth = 0;
172 let mut last_value_range = 0..0;
173 let mut first_key_start = None;
174 let mut existing_value_range = 0..text.len();
175
176 let mut matches = cursor.matches(&PAIR_QUERY, syntax_tree.root_node(), text.as_bytes());
177 while let Some(mat) = matches.next() {
178 if mat.captures.len() != 2 {
179 continue;
180 }
181
182 let key_range = mat.captures[0].node.byte_range();
183 let value_range = mat.captures[1].node.byte_range();
184
185 // Don't enter sub objects until we find an exact
186 // match for the current keypath
187 if last_value_range.contains_inclusive(&value_range) {
188 continue;
189 }
190
191 last_value_range = value_range.clone();
192
193 if key_range.start > existing_value_range.end {
194 break;
195 }
196
197 first_key_start.get_or_insert(key_range.start);
198
199 let found_key = text
200 .get(key_range.clone())
201 .map(|key_text| {
202 depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth])
203 })
204 .unwrap_or(false);
205
206 if found_key {
207 existing_value_range = value_range;
208 // Reset last value range when increasing in depth
209 last_value_range = existing_value_range.start..existing_value_range.start;
210 depth += 1;
211
212 if depth == key_path.len() {
213 break;
214 }
215
216 first_key_start = None;
217 }
218 }
219
220 // We found the exact key we want
221 if depth == key_path.len() {
222 if let Some(new_value) = new_value {
223 let new_val = to_pretty_json(new_value, tab_size, tab_size * depth);
224 if let Some(replace_key) = replace_key {
225 let new_key = format!("\"{}\": ", replace_key);
226 if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
227 if let Some(prev_key_start) = text[..key_start].rfind('"') {
228 existing_value_range.start = prev_key_start;
229 } else {
230 existing_value_range.start = key_start;
231 }
232 }
233 (existing_value_range, new_key + &new_val)
234 } else {
235 (existing_value_range, new_val)
236 }
237 } else {
238 let mut removal_start = first_key_start.unwrap_or(existing_value_range.start);
239 let mut removal_end = existing_value_range.end;
240
241 // Find the actual key position by looking for the key in the pair
242 // We need to extend the range to include the key, not just the value
243 if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
244 if let Some(prev_key_start) = text[..key_start].rfind('"') {
245 removal_start = prev_key_start;
246 } else {
247 removal_start = key_start;
248 }
249 }
250
251 // Look backward for a preceding comma first
252 let preceding_text = text.get(0..removal_start).unwrap_or("");
253 if let Some(comma_pos) = preceding_text.rfind(',') {
254 // Check if there are only whitespace characters between the comma and our key
255 let between_comma_and_key = text.get(comma_pos + 1..removal_start).unwrap_or("");
256 if between_comma_and_key.trim().is_empty() {
257 removal_start = comma_pos;
258 }
259 }
260
261 if let Some(remaining_text) = text.get(existing_value_range.end..) {
262 let mut chars = remaining_text.char_indices();
263 while let Some((offset, ch)) = chars.next() {
264 if ch == ',' {
265 removal_end = existing_value_range.end + offset + 1;
266 // Also consume whitespace after the comma
267 while let Some((_, next_ch)) = chars.next() {
268 if next_ch.is_whitespace() {
269 removal_end += next_ch.len_utf8();
270 } else {
271 break;
272 }
273 }
274 break;
275 } else if !ch.is_whitespace() {
276 break;
277 }
278 }
279 }
280 (removal_start..removal_end, String::new())
281 }
282 } else {
        // The full key path wasn't found, so the remaining keys need to be created.
        let new_key = key_path[depth];

        // Wrap the new value in an object for each missing key below this depth.
287 let mut new_value =
288 serde_json::to_value(new_value.unwrap_or(&serde_json::Value::Null)).unwrap();
289 for key in key_path[(depth + 1)..].iter().rev() {
290 new_value = serde_json::json!({ key.to_string(): new_value });
291 }
292
293 if let Some(first_key_start) = first_key_start {
294 let mut row = 0;
295 let mut column = 0;
296 for (ix, char) in text.char_indices() {
297 if ix == first_key_start {
298 break;
299 }
300 if char == '\n' {
301 row += 1;
302 column = 0;
303 } else {
304 column += char.len_utf8();
305 }
306 }
307
308 if row > 0 {
                // `depth` is 0-based, but the division below needs a 1-based count, hence `depth + 1`.
310 let new_val = to_pretty_json(&new_value, column / (depth + 1), column);
311 let space = ' ';
312 let content = format!("\"{new_key}\": {new_val},\n{space:width$}", width = column);
313 (first_key_start..first_key_start, content)
314 } else {
315 let new_val = serde_json::to_string(&new_value).unwrap();
316 let mut content = format!(r#""{new_key}": {new_val},"#);
317 content.push(' ');
318 (first_key_start..first_key_start, content)
319 }
320 } else {
321 new_value = serde_json::json!({ new_key.to_string(): new_value });
322 let indent_prefix_len = 4 * depth;
323 let mut new_val = to_pretty_json(&new_value, 4, indent_prefix_len);
324 if depth == 0 {
325 new_val.push('\n');
326 }
            // Best-effort attempt to keep existing comments, with best-effort indentation.
328 let mut replace_text = &text[existing_value_range.clone()];
329 while let Some(comment_start) = replace_text.rfind("//") {
330 if let Some(comment_end) = replace_text[comment_start..].find('\n') {
331 let mut comment_with_indent_start = replace_text[..comment_start]
332 .rfind('\n')
333 .unwrap_or(comment_start);
334 if !replace_text[comment_with_indent_start..comment_start]
335 .trim()
336 .is_empty()
337 {
338 comment_with_indent_start = comment_start;
339 }
340 new_val.insert_str(
341 1,
342 &replace_text[comment_with_indent_start..comment_start + comment_end],
343 );
344 }
345 replace_text = &replace_text[..comment_start];
346 }
347
348 (existing_value_range, new_val)
349 }
350 }
351}
352
const TS_DOCUMENT_KIND: &str = "document";
const TS_ARRAY_KIND: &str = "array";
const TS_COMMENT_KIND: &str = "comment";
356
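/// Replaces the element at `array_index` in a top-level JSON array, descending into it via
/// `key_path` when non-empty. If the index is past the end of the array and `new_value` is
/// `Some`, the value is appended instead.
///
/// A sketch of intended usage:
///
/// ```ignore
/// let (range, replacement) = replace_top_level_array_value_in_json_text(
///     "[1, 2, 3]",
///     &[],
///     Some(&serde_json::json!(5)),
///     None,
///     1,
///     4,
/// )?;
/// // Applying `replacement` over `range` yields "[1, 5, 3]".
/// ```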
pub fn replace_top_level_array_value_in_json_text(
    text: &str,
    key_path: &[&str],
    new_value: Option<&Value>,
    replace_key: Option<&str>,
    array_index: usize,
    tab_size: usize,
) -> Result<(Range<usize>, String)> {
365 let mut parser = tree_sitter::Parser::new();
366 parser
367 .set_language(&tree_sitter_json::LANGUAGE.into())
368 .unwrap();
369 let syntax_tree = parser.parse(text, None).unwrap();
370
371 let mut cursor = syntax_tree.walk();
372
373 if cursor.node().kind() == TS_DOCUMENT_KIND {
374 anyhow::ensure!(
375 cursor.goto_first_child(),
376 "Document empty - No top level array"
377 );
378 }
379
380 while cursor.node().kind() != TS_ARRAY_KIND {
381 anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
382 }
383
    // `goto_first_child` returns false if the array node has no children; the debug_assert
    // below catches that malformed case in debug builds.
386 cursor.goto_first_child();
387 debug_assert_eq!(cursor.node().kind(), "[");
388
389 let mut index = 0;
390
391 while index <= array_index {
392 let node = cursor.node();
393 if !matches!(node.kind(), "[" | "]" | TS_COMMENT_KIND | ",")
394 && !node.is_extra()
395 && !node.is_missing()
396 {
397 if index == array_index {
398 break;
399 }
400 index += 1;
401 }
402 if !cursor.goto_next_sibling() {
403 if let Some(new_value) = new_value {
404 return append_top_level_array_value_in_json_text(text, new_value, tab_size);
405 } else {
406 return Ok((0..0, String::new()));
407 }
408 }
409 }
410
411 let range = cursor.node().range();
412 let indent_width = range.start_point.column;
413 let offset = range.start_byte;
414 let value_str = &text[range.start_byte..range.end_byte];
415 let needs_indent = range.start_point.row > 0;
416
417 let (mut replace_range, mut replace_value) =
418 replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);
419
420 replace_range.start += offset;
421 replace_range.end += offset;
422
423 if needs_indent {
424 let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
425 replace_value = replace_value.replace('\n', &increased_indent);
427 } else {
428 while let Some(idx) = replace_value.find("\n ") {
429 replace_value.remove(idx + 1);
430 }
        while let Some(idx) = replace_value.find('\n') {
432 replace_value.replace_range(idx..idx + 1, " ");
433 }
434 }
435
436 return Ok((replace_range, replace_value));
437}
438
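/// Appends `new_value` as the last element of a top-level JSON array, inserting a comma as
/// needed and preserving trailing commas and comments.
///
/// A sketch of intended usage:
///
/// ```ignore
/// let (range, replacement) =
///     append_top_level_array_value_in_json_text("[1, 2]", &serde_json::json!(3), 4)?;
/// // Applying `replacement` over `range` yields "[1, 2, 3]".
/// ```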
pub fn append_top_level_array_value_in_json_text(
    text: &str,
    new_value: &Value,
    tab_size: usize,
) -> Result<(Range<usize>, String)> {
444 let mut parser = tree_sitter::Parser::new();
445 parser
446 .set_language(&tree_sitter_json::LANGUAGE.into())
447 .unwrap();
448 let syntax_tree = parser.parse(text, None).unwrap();
449
450 let mut cursor = syntax_tree.walk();
451
452 if cursor.node().kind() == TS_DOCUMENT_KIND {
453 anyhow::ensure!(
454 cursor.goto_first_child(),
455 "Document empty - No top level array"
456 );
457 }
458
459 while cursor.node().kind() != TS_ARRAY_KIND {
460 anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
461 }
462
463 anyhow::ensure!(
464 cursor.goto_last_child(),
465 "Malformed JSON syntax tree, expected `]` at end of array"
466 );
467 debug_assert_eq!(cursor.node().kind(), "]");
468 let close_bracket_start = cursor.node().start_byte();
469 cursor.goto_previous_sibling();
470 while (cursor.node().is_extra() || cursor.node().is_missing()) && cursor.goto_previous_sibling()
471 {
472 }
473
474 let mut comma_range = None;
475 let mut prev_item_range = None;
476
477 if cursor.node().kind() == "," {
478 comma_range = Some(cursor.node().byte_range());
479 while cursor.goto_previous_sibling() && cursor.node().is_extra() {}
480
481 debug_assert_ne!(cursor.node().kind(), "[");
482 prev_item_range = Some(cursor.node().range());
483 } else {
484 while (cursor.node().is_extra() || cursor.node().is_missing())
485 && cursor.goto_previous_sibling()
486 {}
487 if cursor.node().kind() != "[" {
488 prev_item_range = Some(cursor.node().range());
489 }
490 }
491
492 let (mut replace_range, mut replace_value) =
493 replace_value_in_json_text("", &[], tab_size, Some(new_value), None);
494
495 replace_range.start = close_bracket_start;
496 replace_range.end = close_bracket_start;
497
498 let space = ' ';
499 if let Some(prev_item_range) = prev_item_range {
500 let needs_newline = prev_item_range.start_point.row > 0;
501 let indent_width = text[..prev_item_range.start_byte].rfind('\n').map_or(
502 prev_item_range.start_point.column,
503 |idx| {
504 prev_item_range.start_point.column
505 - text[idx + 1..prev_item_range.start_byte].trim_start().len()
506 },
507 );
508
509 let prev_item_end = comma_range
510 .as_ref()
511 .map_or(prev_item_range.end_byte, |range| range.end);
512 if text[prev_item_end..replace_range.start].trim().is_empty() {
513 replace_range.start = prev_item_end;
514 }
515
516 if needs_newline {
517 let increased_indent = format!("\n{space:width$}", width = indent_width);
518 replace_value = replace_value.replace('\n', &increased_indent);
519 replace_value.push('\n');
520 replace_value.insert_str(0, &format!("\n{space:width$}", width = indent_width));
521 } else {
522 while let Some(idx) = replace_value.find("\n ") {
523 replace_value.remove(idx + 1);
524 }
525 while let Some(idx) = replace_value.find('\n') {
526 replace_value.replace_range(idx..idx + 1, " ");
527 }
528 replace_value.insert(0, ' ');
529 }
530
531 if comma_range.is_none() {
532 replace_value.insert(0, ',');
533 }
534 } else {
535 if let Some(prev_newline) = text[..replace_range.start].rfind('\n') {
536 if text[prev_newline..replace_range.start].trim().is_empty() {
537 replace_range.start = prev_newline;
538 }
539 }
540 let indent = format!("\n{space:width$}", width = tab_size);
541 replace_value = replace_value.replace('\n', &indent);
542 replace_value.insert_str(0, &indent);
543 replace_value.push('\n');
544 }
545 return Ok((replace_range, replace_value));
546}
547
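/// Serializes `value` as pretty-printed JSON with `indent_size` spaces per level, then prefixes
/// every line after the first with `indent_prefix_len` spaces so the result can be spliced into
/// text that is already indented.
///
/// A sketch of the resulting shape:
///
/// ```ignore
/// let json = to_pretty_json(&serde_json::json!({ "a": 1 }), 4, 4);
/// assert_eq!(json, "{\n        \"a\": 1\n    }");
/// ```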
pub fn to_pretty_json(
    value: &impl Serialize,
    indent_size: usize,
    indent_prefix_len: usize,
) -> String {
553 const SPACES: [u8; 32] = [b' '; 32];
554
555 debug_assert!(indent_size <= SPACES.len());
556 debug_assert!(indent_prefix_len <= SPACES.len());
557
558 let mut output = Vec::new();
559 let mut ser = serde_json::Serializer::with_formatter(
560 &mut output,
561 serde_json::ser::PrettyFormatter::with_indent(&SPACES[0..indent_size.min(SPACES.len())]),
562 );
563
564 value.serialize(&mut ser).unwrap();
565 let text = String::from_utf8(output).unwrap();
566
567 let mut adjusted_text = String::new();
568 for (i, line) in text.split('\n').enumerate() {
569 if i > 0 {
570 adjusted_text.push_str(str::from_utf8(&SPACES[0..indent_prefix_len]).unwrap());
571 }
572 adjusted_text.push_str(line);
573 adjusted_text.push('\n');
574 }
575 adjusted_text.pop();
576 adjusted_text
577}
578
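/// Deserializes JSON that may contain comments (and other lenient extensions accepted by
/// `serde_json_lenient`).
///
/// A sketch of intended usage:
///
/// ```ignore
/// let value: serde_json::Value = parse_json_with_comments("{ /* comment */ \"a\": 1 }")?;
/// ```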
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
    Ok(serde_json_lenient::from_str(content)?)
}
582
583#[cfg(test)]
584mod tests {
585 use super::*;
586 use serde_json::{Value, json};
587 use unindent::Unindent;
588
589 #[test]
590 fn object_replace() {
591 #[track_caller]
592 fn check_object_replace(
593 input: String,
594 key_path: &[&str],
595 value: Option<Value>,
596 expected: String,
597 ) {
598 let result = replace_value_in_json_text(&input, key_path, 4, value.as_ref(), None);
599 let mut result_str = input.to_string();
600 result_str.replace_range(result.0, &result.1);
601 pretty_assertions::assert_eq!(expected, result_str);
602 }
603 check_object_replace(
604 r#"{
605 "a": 1,
606 "b": 2
607 }"#
608 .unindent(),
609 &["b"],
610 Some(json!(3)),
611 r#"{
612 "a": 1,
613 "b": 3
614 }"#
615 .unindent(),
616 );
617 check_object_replace(
618 r#"{
619 "a": 1,
620 "b": 2
621 }"#
622 .unindent(),
623 &["b"],
624 None,
625 r#"{
626 "a": 1
627 }"#
628 .unindent(),
629 );
630 check_object_replace(
631 r#"{
632 "a": 1,
633 "b": 2
634 }"#
635 .unindent(),
636 &["c"],
637 Some(json!(3)),
638 r#"{
639 "c": 3,
640 "a": 1,
641 "b": 2
642 }"#
643 .unindent(),
644 );
645 check_object_replace(
646 r#"{
647 "a": 1,
648 "b": {
649 "c": 2,
650 "d": 3,
651 }
652 }"#
653 .unindent(),
654 &["b", "c"],
655 Some(json!([1, 2, 3])),
656 r#"{
657 "a": 1,
658 "b": {
659 "c": [
660 1,
661 2,
662 3
663 ],
664 "d": 3,
665 }
666 }"#
667 .unindent(),
668 );
669
670 check_object_replace(
671 r#"{
672 "name": "old_name",
673 "id": 123
674 }"#
675 .unindent(),
676 &["name"],
677 Some(json!("new_name")),
678 r#"{
679 "name": "new_name",
680 "id": 123
681 }"#
682 .unindent(),
683 );
684
685 check_object_replace(
686 r#"{
687 "enabled": false,
688 "count": 5
689 }"#
690 .unindent(),
691 &["enabled"],
692 Some(json!(true)),
693 r#"{
694 "enabled": true,
695 "count": 5
696 }"#
697 .unindent(),
698 );
699
700 check_object_replace(
701 r#"{
702 "value": null,
703 "other": "test"
704 }"#
705 .unindent(),
706 &["value"],
707 Some(json!(42)),
708 r#"{
709 "value": 42,
710 "other": "test"
711 }"#
712 .unindent(),
713 );
714
715 check_object_replace(
716 r#"{
717 "config": {
718 "old": true
719 },
720 "name": "test"
721 }"#
722 .unindent(),
723 &["config"],
724 Some(json!({"new": false, "count": 3})),
725 r#"{
726 "config": {
727 "new": false,
728 "count": 3
729 },
730 "name": "test"
731 }"#
732 .unindent(),
733 );
734
735 check_object_replace(
736 r#"{
737 // This is a comment
738 "a": 1,
739 "b": 2 // Another comment
740 }"#
741 .unindent(),
742 &["b"],
743 Some(json!({"foo": "bar"})),
744 r#"{
745 // This is a comment
746 "a": 1,
747 "b": {
748 "foo": "bar"
749 } // Another comment
750 }"#
751 .unindent(),
752 );
753
754 check_object_replace(
755 r#"{}"#.to_string(),
756 &["new_key"],
757 Some(json!("value")),
758 r#"{
759 "new_key": "value"
760 }
761 "#
762 .unindent(),
763 );
764
765 check_object_replace(
766 r#"{
767 "only_key": 123
768 }"#
769 .unindent(),
770 &["only_key"],
771 None,
772 "{\n \n}".to_string(),
773 );
774
775 check_object_replace(
776 r#"{
777 "level1": {
778 "level2": {
779 "level3": {
780 "target": "old"
781 }
782 }
783 }
784 }"#
785 .unindent(),
786 &["level1", "level2", "level3", "target"],
787 Some(json!("new")),
788 r#"{
789 "level1": {
790 "level2": {
791 "level3": {
792 "target": "new"
793 }
794 }
795 }
796 }"#
797 .unindent(),
798 );
799
800 check_object_replace(
801 r#"{
802 "parent": {}
803 }"#
804 .unindent(),
805 &["parent", "child"],
806 Some(json!("value")),
807 r#"{
808 "parent": {
809 "child": "value"
810 }
811 }"#
812 .unindent(),
813 );
814
815 check_object_replace(
816 r#"{
817 "a": 1,
818 "b": 2,
819 }"#
820 .unindent(),
821 &["b"],
822 Some(json!(3)),
823 r#"{
824 "a": 1,
825 "b": 3,
826 }"#
827 .unindent(),
828 );
829
830 check_object_replace(
831 r#"{
832 "items": [1, 2, 3],
833 "count": 3
834 }"#
835 .unindent(),
836 &["items", "1"],
837 Some(json!(5)),
838 r#"{
839 "items": {
840 "1": 5
841 },
842 "count": 3
843 }"#
844 .unindent(),
845 );
846
847 check_object_replace(
848 r#"{
849 "items": [1, 2, 3],
850 "count": 3
851 }"#
852 .unindent(),
853 &["items", "1"],
854 None,
855 r#"{
856 "items": {
857 "1": null
858 },
859 "count": 3
860 }"#
861 .unindent(),
862 );
863
864 check_object_replace(
865 r#"{
866 "items": [1, 2, 3],
867 "count": 3
868 }"#
869 .unindent(),
870 &["items"],
871 Some(json!(["a", "b", "c", "d"])),
872 r#"{
873 "items": [
874 "a",
875 "b",
876 "c",
877 "d"
878 ],
879 "count": 3
880 }"#
881 .unindent(),
882 );
883
884 check_object_replace(
885 r#"{
886 "0": "zero",
887 "1": "one"
888 }"#
889 .unindent(),
890 &["1"],
891 Some(json!("ONE")),
892 r#"{
893 "0": "zero",
894 "1": "ONE"
895 }"#
896 .unindent(),
897 );
898 // Test with comments between object members
899 check_object_replace(
900 r#"{
901 "a": 1,
902 // Comment between members
903 "b": 2,
904 /* Block comment */
905 "c": 3
906 }"#
907 .unindent(),
908 &["b"],
909 Some(json!({"nested": true})),
910 r#"{
911 "a": 1,
912 // Comment between members
913 "b": {
914 "nested": true
915 },
916 /* Block comment */
917 "c": 3
918 }"#
919 .unindent(),
920 );
921
922 // Test with trailing comments on replaced value
923 check_object_replace(
924 r#"{
925 "a": 1, // keep this comment
926 "b": 2 // this should stay
927 }"#
928 .unindent(),
929 &["a"],
930 Some(json!("changed")),
931 r#"{
932 "a": "changed", // keep this comment
933 "b": 2 // this should stay
934 }"#
935 .unindent(),
936 );
937
938 // Test with deep indentation
939 check_object_replace(
940 r#"{
941 "deeply": {
942 "nested": {
943 "value": "old"
944 }
945 }
946 }"#
947 .unindent(),
948 &["deeply", "nested", "value"],
949 Some(json!("new")),
950 r#"{
951 "deeply": {
952 "nested": {
953 "value": "new"
954 }
955 }
956 }"#
957 .unindent(),
958 );
959
960 // Test removing value with comment preservation
961 check_object_replace(
962 r#"{
963 // Header comment
964 "a": 1,
965 // This comment belongs to b
966 "b": 2,
967 // This comment belongs to c
968 "c": 3
969 }"#
970 .unindent(),
971 &["b"],
972 None,
973 r#"{
974 // Header comment
975 "a": 1,
976 // This comment belongs to b
977 // This comment belongs to c
978 "c": 3
979 }"#
980 .unindent(),
981 );
982
983 // Test with multiline block comments
984 check_object_replace(
985 r#"{
986 /*
987 * This is a multiline
988 * block comment
989 */
990 "value": "old",
991 /* Another block */ "other": 123
992 }"#
993 .unindent(),
994 &["value"],
995 Some(json!("new")),
996 r#"{
997 /*
998 * This is a multiline
999 * block comment
1000 */
1001 "value": "new",
1002 /* Another block */ "other": 123
1003 }"#
1004 .unindent(),
1005 );
1006
1007 check_object_replace(
1008 r#"{
1009 // This object is empty
1010 }"#
1011 .unindent(),
1012 &["key"],
1013 Some(json!("value")),
1014 r#"{
1015 // This object is empty
1016 "key": "value"
1017 }
1018 "#
1019 .unindent(),
1020 );
1021
1022 // Test replacing in object with only comments
1023 check_object_replace(
1024 r#"{
1025 // Comment 1
1026 // Comment 2
1027 }"#
1028 .unindent(),
1029 &["new"],
1030 Some(json!(42)),
1031 r#"{
1032 // Comment 1
1033 // Comment 2
1034 "new": 42
1035 }
1036 "#
1037 .unindent(),
1038 );
1039
1040 // Test with inconsistent spacing
1041 check_object_replace(
1042 r#"{
1043 "a":1,
1044 "b" : 2 ,
1045 "c": 3
1046 }"#
1047 .unindent(),
1048 &["b"],
1049 Some(json!("spaced")),
1050 r#"{
1051 "a":1,
1052 "b" : "spaced" ,
1053 "c": 3
1054 }"#
1055 .unindent(),
1056 );
1057 }
1058
1059 #[test]
1060 fn array_replace() {
1061 #[track_caller]
1062 fn check_array_replace(
1063 input: impl ToString,
1064 index: usize,
1065 key_path: &[&str],
1066 value: Value,
1067 expected: impl ToString,
1068 ) {
1069 let input = input.to_string();
1070 let result = replace_top_level_array_value_in_json_text(
1071 &input,
1072 key_path,
1073 Some(&value),
1074 None,
1075 index,
1076 4,
1077 )
1078 .expect("replace succeeded");
1079 let mut result_str = input;
1080 result_str.replace_range(result.0, &result.1);
1081 pretty_assertions::assert_eq!(expected.to_string(), result_str);
1082 }
1083
1084 check_array_replace(r#"[1, 3, 3]"#, 1, &[], json!(2), r#"[1, 2, 3]"#);
1085 check_array_replace(r#"[1, 3, 3]"#, 2, &[], json!(2), r#"[1, 3, 2]"#);
1086 check_array_replace(r#"[1, 3, 3,]"#, 3, &[], json!(2), r#"[1, 3, 3, 2]"#);
1087 check_array_replace(r#"[1, 3, 3,]"#, 100, &[], json!(2), r#"[1, 3, 3, 2]"#);
1088 check_array_replace(
1089 r#"[
1090 1,
1091 2,
1092 3,
1093 ]"#
1094 .unindent(),
1095 1,
1096 &[],
1097 json!({"foo": "bar", "baz": "qux"}),
1098 r#"[
1099 1,
1100 {
1101 "foo": "bar",
1102 "baz": "qux"
1103 },
1104 3,
1105 ]"#
1106 .unindent(),
1107 );
1108 check_array_replace(
1109 r#"[1, 3, 3,]"#,
1110 1,
1111 &[],
1112 json!({"foo": "bar", "baz": "qux"}),
1113 r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
1114 );
1115
1116 check_array_replace(
1117 r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
1118 1,
1119 &["baz"],
1120 json!({"qux": "quz"}),
1121 r#"[1, { "foo": "bar", "baz": { "qux": "quz" } }, 3,]"#,
1122 );
1123
1124 check_array_replace(
1125 r#"[
1126 1,
1127 {
1128 "foo": "bar",
1129 "baz": "qux"
1130 },
1131 3
1132 ]"#,
1133 1,
1134 &["baz"],
1135 json!({"qux": "quz"}),
1136 r#"[
1137 1,
1138 {
1139 "foo": "bar",
1140 "baz": {
1141 "qux": "quz"
1142 }
1143 },
1144 3
1145 ]"#,
1146 );
1147
1148 check_array_replace(
1149 r#"[
1150 1,
1151 {
1152 "foo": "bar",
1153 "baz": {
1154 "qux": "quz"
1155 }
1156 },
1157 3
1158 ]"#,
1159 1,
1160 &["baz"],
1161 json!("qux"),
1162 r#"[
1163 1,
1164 {
1165 "foo": "bar",
1166 "baz": "qux"
1167 },
1168 3
1169 ]"#,
1170 );
1171
1172 check_array_replace(
1173 r#"[
1174 1,
1175 {
1176 "foo": "bar",
1177 // some comment to keep
1178 "baz": {
1179 // some comment to remove
1180 "qux": "quz"
1181 }
1182 // some other comment to keep
1183 },
1184 3
1185 ]"#,
1186 1,
1187 &["baz"],
1188 json!("qux"),
1189 r#"[
1190 1,
1191 {
1192 "foo": "bar",
1193 // some comment to keep
1194 "baz": "qux"
1195 // some other comment to keep
1196 },
1197 3
1198 ]"#,
1199 );
1200
1201 // Test with comments between array elements
1202 check_array_replace(
1203 r#"[
1204 1,
1205 // This is element 2
1206 2,
1207 /* Block comment */ 3,
1208 4 // Trailing comment
1209 ]"#,
1210 2,
1211 &[],
1212 json!("replaced"),
1213 r#"[
1214 1,
1215 // This is element 2
1216 2,
1217 /* Block comment */ "replaced",
1218 4 // Trailing comment
1219 ]"#,
1220 );
1221
1222 // Test empty array with comments
1223 check_array_replace(
1224 r#"[
1225 // Empty array with comment
1226 ]"#
1227 .unindent(),
1228 0,
1229 &[],
1230 json!("first"),
1231 r#"[
1232 // Empty array with comment
1233 "first"
1234 ]"#
1235 .unindent(),
1236 );
1237 check_array_replace(
1238 r#"[]"#.unindent(),
1239 0,
1240 &[],
1241 json!("first"),
1242 r#"[
1243 "first"
1244 ]"#
1245 .unindent(),
1246 );
1247
1248 // Test array with leading comments
1249 check_array_replace(
1250 r#"[
1251 // Leading comment
1252 // Another leading comment
1253 1,
1254 2
1255 ]"#,
1256 0,
1257 &[],
1258 json!({"new": "object"}),
1259 r#"[
1260 // Leading comment
1261 // Another leading comment
1262 {
1263 "new": "object"
1264 },
1265 2
1266 ]"#,
1267 );
1268
1269 // Test with deep indentation
1270 check_array_replace(
1271 r#"[
1272 1,
1273 2,
1274 3
1275 ]"#,
1276 1,
1277 &[],
1278 json!("deep"),
1279 r#"[
1280 1,
1281 "deep",
1282 3
1283 ]"#,
1284 );
1285
1286 // Test with mixed spacing
1287 check_array_replace(
1288 r#"[1,2, 3, 4]"#,
1289 2,
1290 &[],
1291 json!("spaced"),
1292 r#"[1,2, "spaced", 4]"#,
1293 );
1294
1295 // Test replacing nested array element
1296 check_array_replace(
1297 r#"[
1298 [1, 2, 3],
1299 [4, 5, 6],
1300 [7, 8, 9]
1301 ]"#,
1302 1,
1303 &[],
1304 json!(["a", "b", "c", "d"]),
1305 r#"[
1306 [1, 2, 3],
1307 [
1308 "a",
1309 "b",
1310 "c",
1311 "d"
1312 ],
1313 [7, 8, 9]
1314 ]"#,
1315 );
1316
1317 // Test with multiline block comments
1318 check_array_replace(
1319 r#"[
1320 /*
1321 * This is a
1322 * multiline comment
1323 */
1324 "first",
1325 "second"
1326 ]"#,
1327 0,
1328 &[],
1329 json!("updated"),
1330 r#"[
1331 /*
1332 * This is a
1333 * multiline comment
1334 */
1335 "updated",
1336 "second"
1337 ]"#,
1338 );
1339
1340 // Test replacing with null
1341 check_array_replace(
1342 r#"[true, false, true]"#,
1343 1,
1344 &[],
1345 json!(null),
1346 r#"[true, null, true]"#,
1347 );
1348
1349 // Test single element array
1350 check_array_replace(
1351 r#"[42]"#,
1352 0,
1353 &[],
1354 json!({"answer": 42}),
1355 r#"[{ "answer": 42 }]"#,
1356 );
1357
1358 // Test array with only comments
1359 check_array_replace(
1360 r#"[
1361 // Comment 1
1362 // Comment 2
1363 // Comment 3
1364 ]"#
1365 .unindent(),
1366 10,
1367 &[],
1368 json!(123),
1369 r#"[
1370 // Comment 1
1371 // Comment 2
1372 // Comment 3
1373 123
1374 ]"#
1375 .unindent(),
1376 );
1377 }
1378
1379 #[test]
1380 fn array_append() {
1381 #[track_caller]
1382 fn check_array_append(input: impl ToString, value: Value, expected: impl ToString) {
1383 let input = input.to_string();
1384 let result = append_top_level_array_value_in_json_text(&input, &value, 4)
1385 .expect("append succeeded");
1386 let mut result_str = input;
1387 result_str.replace_range(result.0, &result.1);
1388 pretty_assertions::assert_eq!(expected.to_string(), result_str);
1389 }
1390 check_array_append(r#"[1, 3, 3]"#, json!(4), r#"[1, 3, 3, 4]"#);
1391 check_array_append(r#"[1, 3, 3,]"#, json!(4), r#"[1, 3, 3, 4]"#);
1392 check_array_append(r#"[1, 3, 3 ]"#, json!(4), r#"[1, 3, 3, 4]"#);
1393 check_array_append(r#"[1, 3, 3, ]"#, json!(4), r#"[1, 3, 3, 4]"#);
1394 check_array_append(
1395 r#"[
1396 1,
1397 2,
1398 3
1399 ]"#
1400 .unindent(),
1401 json!(4),
1402 r#"[
1403 1,
1404 2,
1405 3,
1406 4
1407 ]"#
1408 .unindent(),
1409 );
1410 check_array_append(
1411 r#"[
1412 1,
1413 2,
1414 3,
1415 ]"#
1416 .unindent(),
1417 json!(4),
1418 r#"[
1419 1,
1420 2,
1421 3,
1422 4
1423 ]"#
1424 .unindent(),
1425 );
1426 check_array_append(
1427 r#"[
1428 1,
1429 2,
1430 3,
1431 ]"#
1432 .unindent(),
1433 json!({"foo": "bar", "baz": "qux"}),
1434 r#"[
1435 1,
1436 2,
1437 3,
1438 {
1439 "foo": "bar",
1440 "baz": "qux"
1441 }
1442 ]"#
1443 .unindent(),
1444 );
1445 check_array_append(
1446 r#"[ 1, 2, 3, ]"#.unindent(),
1447 json!({"foo": "bar", "baz": "qux"}),
1448 r#"[ 1, 2, 3, { "foo": "bar", "baz": "qux" }]"#.unindent(),
1449 );
1450 check_array_append(
1451 r#"[]"#,
1452 json!({"foo": "bar"}),
1453 r#"[
1454 {
1455 "foo": "bar"
1456 }
1457 ]"#
1458 .unindent(),
1459 );
1460
1461 // Test with comments between array elements
1462 check_array_append(
1463 r#"[
1464 1,
1465 // Comment between elements
1466 2,
1467 /* Block comment */ 3
1468 ]"#
1469 .unindent(),
1470 json!(4),
1471 r#"[
1472 1,
1473 // Comment between elements
1474 2,
1475 /* Block comment */ 3,
1476 4
1477 ]"#
1478 .unindent(),
1479 );
1480
1481 // Test with trailing comment on last element
1482 check_array_append(
1483 r#"[
1484 1,
1485 2,
1486 3 // Trailing comment
1487 ]"#
1488 .unindent(),
1489 json!("new"),
1490 r#"[
1491 1,
1492 2,
1493 3 // Trailing comment
1494 ,
1495 "new"
1496 ]"#
1497 .unindent(),
1498 );
1499
1500 // Test empty array with comments
1501 check_array_append(
1502 r#"[
1503 // Empty array with comment
1504 ]"#
1505 .unindent(),
1506 json!("first"),
1507 r#"[
1508 // Empty array with comment
1509 "first"
1510 ]"#
1511 .unindent(),
1512 );
1513
1514 // Test with multiline block comment at end
1515 check_array_append(
1516 r#"[
1517 1,
1518 2
1519 /*
1520 * This is a
1521 * multiline comment
1522 */
1523 ]"#
1524 .unindent(),
1525 json!(3),
1526 r#"[
1527 1,
1528 2
1529 /*
1530 * This is a
1531 * multiline comment
1532 */
1533 ,
1534 3
1535 ]"#
1536 .unindent(),
1537 );
1538
1539 // Test with deep indentation
1540 check_array_append(
1541 r#"[
1542 1,
1543 2,
1544 3
1545 ]"#
1546 .unindent(),
1547 json!("deep"),
1548 r#"[
1549 1,
1550 2,
1551 3,
1552 "deep"
1553 ]"#
1554 .unindent(),
1555 );
1556
1557 // Test with no spacing
1558 check_array_append(r#"[1,2,3]"#, json!(4), r#"[1,2,3, 4]"#);
1559
1560 // Test appending complex nested structure
1561 check_array_append(
1562 r#"[
1563 {"a": 1},
1564 {"b": 2}
1565 ]"#
1566 .unindent(),
1567 json!({"c": {"nested": [1, 2, 3]}}),
1568 r#"[
1569 {"a": 1},
1570 {"b": 2},
1571 {
1572 "c": {
1573 "nested": [
1574 1,
1575 2,
1576 3
1577 ]
1578 }
1579 }
1580 ]"#
1581 .unindent(),
1582 );
1583
1584 // Test array ending with comment after bracket
1585 check_array_append(
1586 r#"[
1587 1,
1588 2,
1589 3
1590 ] // Comment after array"#
1591 .unindent(),
1592 json!(4),
1593 r#"[
1594 1,
1595 2,
1596 3,
1597 4
1598 ] // Comment after array"#
1599 .unindent(),
1600 );
1601
1602 // Test with inconsistent element formatting
1603 check_array_append(
1604 r#"[1,
1605 2,
1606 3,
1607 ]"#
1608 .unindent(),
1609 json!(4),
1610 r#"[1,
1611 2,
1612 3,
1613 4
1614 ]"#
1615 .unindent(),
1616 );
1617
1618 // Test appending to single-line array with trailing comma
1619 check_array_append(
1620 r#"[1, 2, 3,]"#,
1621 json!({"key": "value"}),
1622 r#"[1, 2, 3, { "key": "value" }]"#,
1623 );
1624
1625 // Test appending null value
1626 check_array_append(r#"[true, false]"#, json!(null), r#"[true, false, null]"#);
1627
1628 // Test appending to array with only comments
1629 check_array_append(
1630 r#"[
1631 // Just comments here
1632 // More comments
1633 ]"#
1634 .unindent(),
1635 json!(42),
1636 r#"[
1637 // Just comments here
1638 // More comments
1639 42
1640 ]"#
1641 .unindent(),
1642 );
1643 }
1644}