1use std::{ops::Range, sync::LazyLock};
2
3use anyhow::Result;
4use schemars::schema::{
5 ArrayValidation, InstanceType, RootSchema, Schema, SchemaObject, SingleOrVec,
6};
7use serde::{Serialize, de::DeserializeOwned};
8use serde_json::Value;
9use tree_sitter::{Query, StreamingIterator as _};
10use util::RangeExt;
11
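/// Contextual inputs used when generating JSON schemas for settings, such as the names of the
/// available languages and fonts.
///
/// A minimal construction sketch (the listed names are illustrative only):
///
/// ```ignore
/// let params = SettingsJsonSchemaParams {
///     language_names: &["Rust".to_string(), "Markdown".to_string()],
///     font_names: &["Some Mono Font".to_string()],
/// };
/// let font_schema = params.font_family_schema();
/// ```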
12pub struct SettingsJsonSchemaParams<'a> {
13 pub language_names: &'a [String],
14 pub font_names: &'a [String],
15}
16
17impl SettingsJsonSchemaParams<'_> {
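    /// Builds a string schema whose allowed values are the available font names.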
18 pub fn font_family_schema(&self) -> Schema {
19 let available_fonts: Vec<_> = self.font_names.iter().cloned().map(Value::String).collect();
20
21 SchemaObject {
22 instance_type: Some(InstanceType::String.into()),
23 enum_values: Some(available_fonts),
24 ..Default::default()
25 }
26 .into()
27 }
28
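    /// Builds a schema accepting either `null` or an array of unique font family names, each
    /// validated against [`Self::font_family_schema`].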
29 pub fn font_fallback_schema(&self) -> Schema {
30 SchemaObject {
31 instance_type: Some(SingleOrVec::Vec(vec![
32 InstanceType::Array,
33 InstanceType::Null,
34 ])),
35 array: Some(Box::new(ArrayValidation {
                items: Some(SingleOrVec::Single(Box::new(self.font_family_schema()))),
39 unique_items: Some(true),
40 ..Default::default()
41 })),
42 ..Default::default()
43 }
44 .into()
45 }
46}
47
48type PropertyName<'a> = &'a str;
49type ReferencePath<'a> = &'a str;
50
/// Modifies the provided [`RootSchema`] by adding references to all of the specified properties.
///
/// # Examples
///
/// ```ignore
/// # let mut root_schema = RootSchema::default();
/// add_references_to_properties(&mut root_schema, &[
///     ("property_a", "#/definitions/DefinitionA"),
///     ("property_b", "#/definitions/DefinitionB"),
/// ]);
/// ```
62pub fn add_references_to_properties(
63 root_schema: &mut RootSchema,
64 properties_with_references: &[(PropertyName, ReferencePath)],
65) {
66 for (property, definition) in properties_with_references {
67 let Some(schema) = root_schema.schema.object().properties.get_mut(*property) else {
68 log::warn!("property '{property}' not found in JSON schema");
69 continue;
70 };
71
72 match schema {
73 Schema::Object(schema) => {
74 schema.reference = Some(definition.to_string());
75 }
76 Schema::Bool(_) => {
77 // Boolean schemas can't have references.
78 }
79 }
80 }
81}
82
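/// Computes edits that transform the JSON value at `key_path` in `text` from `old_value` to
/// `new_value`. Objects are diffed key by key so that comments and formatting of unchanged
/// parts are preserved; each resulting edit is applied to `text` and pushed onto `edits`.
///
/// * `preserved_keys` - Keys that are rewritten even when their value is unchanged.
///
/// An illustrative sketch of a call site (the settings text is made up for the example):
///
/// ```ignore
/// let mut text = r#"{ "theme": "One Dark", "tab_size": 4 }"#.to_string();
/// let mut edits = Vec::new();
/// update_value_in_json_text(
///     &mut text,
///     &mut Vec::new(),
///     4,
///     &serde_json::json!({ "tab_size": 4 }),
///     &serde_json::json!({ "tab_size": 2 }),
///     &[],
///     &mut edits,
/// );
/// // `text` now contains `"tab_size": 2`; `edits` records the replaced byte ranges.
/// ```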
83pub fn update_value_in_json_text<'a>(
84 text: &mut String,
85 key_path: &mut Vec<&'a str>,
86 tab_size: usize,
87 old_value: &'a Value,
88 new_value: &'a Value,
89 preserved_keys: &[&str],
90 edits: &mut Vec<(Range<usize>, String)>,
91) {
92 // If the old and new values are both objects, then compare them key by key,
93 // preserving the comments and formatting of the unchanged parts. Otherwise,
94 // replace the old value with the new value.
95 if let (Value::Object(old_object), Value::Object(new_object)) = (old_value, new_value) {
96 for (key, old_sub_value) in old_object.iter() {
97 key_path.push(key);
98 if let Some(new_sub_value) = new_object.get(key) {
99 // Key exists in both old and new, recursively update
100 update_value_in_json_text(
101 text,
102 key_path,
103 tab_size,
104 old_sub_value,
105 new_sub_value,
106 preserved_keys,
107 edits,
108 );
109 } else {
110 // Key was removed from new object, remove the entire key-value pair
111 let (range, replacement) =
112 replace_value_in_json_text(text, key_path, 0, None, None);
113 text.replace_range(range.clone(), &replacement);
114 edits.push((range, replacement));
115 }
116 key_path.pop();
117 }
118 for (key, new_sub_value) in new_object.iter() {
119 key_path.push(key);
120 if !old_object.contains_key(key) {
121 update_value_in_json_text(
122 text,
123 key_path,
124 tab_size,
125 &Value::Null,
126 new_sub_value,
127 preserved_keys,
128 edits,
129 );
130 }
131 key_path.pop();
132 }
133 } else if key_path
134 .last()
135 .map_or(false, |key| preserved_keys.contains(key))
136 || old_value != new_value
137 {
138 let mut new_value = new_value.clone();
139 if let Some(new_object) = new_value.as_object_mut() {
140 new_object.retain(|_, v| !v.is_null());
141 }
142 let (range, replacement) =
143 replace_value_in_json_text(text, key_path, tab_size, Some(&new_value), None);
144 text.replace_range(range.clone(), &replacement);
145 edits.push((range, replacement));
146 }
147}
148
/// Computes a text edit against `text` for the value addressed by `key_path`, returning the byte
/// range to replace and the replacement string. When `new_value` is `None` and the key path is
/// matched exactly, the whole key-value pair is removed.
///
/// * `replace_key` - When an exact key match according to `key_path` is found, replace the key with `replace_key` if `Some`.
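///
/// An illustrative sketch of applying the returned edit (the input text is made up):
///
/// ```ignore
/// let text = r#"{ "a": 1, "b": 2 }"#;
/// let (range, replacement) =
///     replace_value_in_json_text(text, &["b"], 4, Some(&serde_json::json!(3)), None);
/// let mut updated = text.to_string();
/// updated.replace_range(range, &replacement);
/// assert_eq!(updated, r#"{ "a": 1, "b": 3 }"#);
/// ```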
150fn replace_value_in_json_text(
151 text: &str,
152 key_path: &[&str],
153 tab_size: usize,
154 new_value: Option<&Value>,
155 replace_key: Option<&str>,
156) -> (Range<usize>, String) {
157 static PAIR_QUERY: LazyLock<Query> = LazyLock::new(|| {
158 Query::new(
159 &tree_sitter_json::LANGUAGE.into(),
160 "(pair key: (string) @key value: (_) @value)",
161 )
162 .expect("Failed to create PAIR_QUERY")
163 });
164
165 let mut parser = tree_sitter::Parser::new();
166 parser
167 .set_language(&tree_sitter_json::LANGUAGE.into())
168 .unwrap();
169 let syntax_tree = parser.parse(text, None).unwrap();
170
171 let mut cursor = tree_sitter::QueryCursor::new();
172
173 let mut depth = 0;
174 let mut last_value_range = 0..0;
175 let mut first_key_start = None;
176 let mut existing_value_range = 0..text.len();
177
178 let mut matches = cursor.matches(&PAIR_QUERY, syntax_tree.root_node(), text.as_bytes());
179 while let Some(mat) = matches.next() {
180 if mat.captures.len() != 2 {
181 continue;
182 }
183
184 let key_range = mat.captures[0].node.byte_range();
185 let value_range = mat.captures[1].node.byte_range();
186
        // Don't descend into sub-objects until we find an exact
        // match for the current key path.
189 if last_value_range.contains_inclusive(&value_range) {
190 continue;
191 }
192
193 last_value_range = value_range.clone();
194
195 if key_range.start > existing_value_range.end {
196 break;
197 }
198
199 first_key_start.get_or_insert(key_range.start);
200
201 let found_key = text
202 .get(key_range.clone())
203 .map(|key_text| {
204 depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth])
205 })
206 .unwrap_or(false);
207
208 if found_key {
209 existing_value_range = value_range;
210 // Reset last value range when increasing in depth
211 last_value_range = existing_value_range.start..existing_value_range.start;
212 depth += 1;
213
214 if depth == key_path.len() {
215 break;
216 }
217
218 first_key_start = None;
219 }
220 }
221
222 // We found the exact key we want
223 if depth == key_path.len() {
224 if let Some(new_value) = new_value {
225 let new_val = to_pretty_json(new_value, tab_size, tab_size * depth);
226 if let Some(replace_key) = replace_key {
227 let new_key = format!("\"{}\": ", replace_key);
228 if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
229 if let Some(prev_key_start) = text[..key_start].rfind('"') {
230 existing_value_range.start = prev_key_start;
231 } else {
232 existing_value_range.start = key_start;
233 }
234 }
235 (existing_value_range, new_key + &new_val)
236 } else {
237 (existing_value_range, new_val)
238 }
239 } else {
240 let mut removal_start = first_key_start.unwrap_or(existing_value_range.start);
241 let mut removal_end = existing_value_range.end;
242
243 // Find the actual key position by looking for the key in the pair
244 // We need to extend the range to include the key, not just the value
245 if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
246 if let Some(prev_key_start) = text[..key_start].rfind('"') {
247 removal_start = prev_key_start;
248 } else {
249 removal_start = key_start;
250 }
251 }
252
253 // Look backward for a preceding comma first
254 let preceding_text = text.get(0..removal_start).unwrap_or("");
255 if let Some(comma_pos) = preceding_text.rfind(',') {
256 // Check if there are only whitespace characters between the comma and our key
257 let between_comma_and_key = text.get(comma_pos + 1..removal_start).unwrap_or("");
258 if between_comma_and_key.trim().is_empty() {
259 removal_start = comma_pos;
260 }
261 }
262
263 if let Some(remaining_text) = text.get(existing_value_range.end..) {
264 let mut chars = remaining_text.char_indices();
265 while let Some((offset, ch)) = chars.next() {
266 if ch == ',' {
267 removal_end = existing_value_range.end + offset + 1;
268 // Also consume whitespace after the comma
269 while let Some((_, next_ch)) = chars.next() {
270 if next_ch.is_whitespace() {
271 removal_end += next_ch.len_utf8();
272 } else {
273 break;
274 }
275 }
276 break;
277 } else if !ch.is_whitespace() {
278 break;
279 }
280 }
281 }
282 (removal_start..removal_end, String::new())
283 }
284 } else {
        // The key path was not fully matched, so the missing key (and any intermediate
        // objects) must be constructed here.
        let new_key = key_path[depth];

        // Wrap the new value in one object per remaining key in the path.
289 let mut new_value =
290 serde_json::to_value(new_value.unwrap_or(&serde_json::Value::Null)).unwrap();
291 for key in key_path[(depth + 1)..].iter().rev() {
292 new_value = serde_json::json!({ key.to_string(): new_value });
293 }
294
295 if let Some(first_key_start) = first_key_start {
296 let mut row = 0;
297 let mut column = 0;
298 for (ix, char) in text.char_indices() {
299 if ix == first_key_start {
300 break;
301 }
302 if char == '\n' {
303 row += 1;
304 column = 0;
305 } else {
306 column += char.len_utf8();
307 }
308 }
309
310 if row > 0 {
                // Derive the per-level indent size from the column: `depth` is zero-based,
                // so divide by the one-based nesting level.
312 let new_val = to_pretty_json(&new_value, column / (depth + 1), column);
313 let space = ' ';
314 let content = format!("\"{new_key}\": {new_val},\n{space:width$}", width = column);
315 (first_key_start..first_key_start, content)
316 } else {
317 let new_val = serde_json::to_string(&new_value).unwrap();
318 let mut content = format!(r#""{new_key}": {new_val},"#);
319 content.push(' ');
320 (first_key_start..first_key_start, content)
321 }
322 } else {
323 new_value = serde_json::json!({ new_key.to_string(): new_value });
324 let indent_prefix_len = 4 * depth;
325 let mut new_val = to_pretty_json(&new_value, 4, indent_prefix_len);
326 if depth == 0 {
327 new_val.push('\n');
328 }
            // Best effort to carry over comments from the replaced text, keeping roughly
            // their original indentation.
330 let mut replace_text = &text[existing_value_range.clone()];
331 while let Some(comment_start) = replace_text.rfind("//") {
332 if let Some(comment_end) = replace_text[comment_start..].find('\n') {
333 let mut comment_with_indent_start = replace_text[..comment_start]
334 .rfind('\n')
335 .unwrap_or(comment_start);
336 if !replace_text[comment_with_indent_start..comment_start]
337 .trim()
338 .is_empty()
339 {
340 comment_with_indent_start = comment_start;
341 }
342 new_val.insert_str(
343 1,
344 &replace_text[comment_with_indent_start..comment_start + comment_end],
345 );
346 }
347 replace_text = &replace_text[..comment_start];
348 }
349
350 (existing_value_range, new_val)
351 }
352 }
353}
354
const TS_DOCUMENT_KIND: &str = "document";
const TS_ARRAY_KIND: &str = "array";
const TS_COMMENT_KIND: &str = "comment";
358
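/// Computes a text edit that replaces the element at `array_index` of the top-level JSON array
/// in `text` (or, when `key_path` is non-empty, a value nested within that element). If the
/// index is past the end of the array the value is appended instead, and if `new_value` is
/// `None` in that case an empty edit is returned.
///
/// An illustrative sketch (the input text is made up):
///
/// ```ignore
/// let text = r#"[1, 3, 3]"#;
/// let (range, replacement) = replace_top_level_array_value_in_json_text(
///     text,
///     &[],
///     Some(&serde_json::json!(2)),
///     None,
///     1,
///     4,
/// )?;
/// let mut updated = text.to_string();
/// updated.replace_range(range, &replacement);
/// assert_eq!(updated, r#"[1, 2, 3]"#);
/// ```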
359pub fn replace_top_level_array_value_in_json_text(
360 text: &str,
361 key_path: &[&str],
362 new_value: Option<&Value>,
363 replace_key: Option<&str>,
364 array_index: usize,
365 tab_size: usize,
366) -> Result<(Range<usize>, String)> {
367 let mut parser = tree_sitter::Parser::new();
368 parser
369 .set_language(&tree_sitter_json::LANGUAGE.into())
370 .unwrap();
371 let syntax_tree = parser.parse(text, None).unwrap();
372
373 let mut cursor = syntax_tree.walk();
374
375 if cursor.node().kind() == TS_DOCUMENT_KIND {
376 anyhow::ensure!(
377 cursor.goto_first_child(),
378 "Document empty - No top level array"
379 );
380 }
381
382 while cursor.node().kind() != TS_ARRAY_KIND {
383 anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
384 }
385
    // `goto_first_child` returns false if the node has no children; an array node's
    // `[` and `]` delimiters count as children, so the result can be ignored here.
388 cursor.goto_first_child();
389 debug_assert_eq!(cursor.node().kind(), "[");
390
391 let mut index = 0;
392
393 while index <= array_index {
394 let node = cursor.node();
395 if !matches!(node.kind(), "[" | "]" | TS_COMMENT_KIND | ",")
396 && !node.is_extra()
397 && !node.is_missing()
398 {
399 if index == array_index {
400 break;
401 }
402 index += 1;
403 }
404 if !cursor.goto_next_sibling() {
405 if let Some(new_value) = new_value {
406 return append_top_level_array_value_in_json_text(text, new_value, tab_size);
407 } else {
408 return Ok((0..0, String::new()));
409 }
410 }
411 }
412
413 let range = cursor.node().range();
414 let indent_width = range.start_point.column;
415 let offset = range.start_byte;
416 let value_str = &text[range.start_byte..range.end_byte];
417 let needs_indent = range.start_point.row > 0;
418
419 let (mut replace_range, mut replace_value) =
420 replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);
421
422 replace_range.start += offset;
423 replace_range.end += offset;
424
425 if needs_indent {
426 let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
427 replace_value = replace_value.replace('\n', &increased_indent);
429 } else {
430 while let Some(idx) = replace_value.find("\n ") {
431 replace_value.remove(idx + 1);
432 }
        while let Some(idx) = replace_value.find('\n') {
434 replace_value.replace_range(idx..idx + 1, " ");
435 }
436 }
437
    Ok((replace_range, replace_value))
439}
440
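/// Computes a text edit that appends `new_value` to the end of the top-level JSON array in
/// `text`, adding a separating comma when needed and following the existing indentation style.
///
/// An illustrative sketch (the input text is made up):
///
/// ```ignore
/// let text = r#"[1, 3, 3]"#;
/// let (range, replacement) =
///     append_top_level_array_value_in_json_text(text, &serde_json::json!(4), 4)?;
/// let mut updated = text.to_string();
/// updated.replace_range(range, &replacement);
/// assert_eq!(updated, r#"[1, 3, 3, 4]"#);
/// ```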
441pub fn append_top_level_array_value_in_json_text(
442 text: &str,
443 new_value: &Value,
444 tab_size: usize,
445) -> Result<(Range<usize>, String)> {
446 let mut parser = tree_sitter::Parser::new();
447 parser
448 .set_language(&tree_sitter_json::LANGUAGE.into())
449 .unwrap();
450 let syntax_tree = parser.parse(text, None).unwrap();
451
452 let mut cursor = syntax_tree.walk();
453
454 if cursor.node().kind() == TS_DOCUMENT_KIND {
455 anyhow::ensure!(
456 cursor.goto_first_child(),
457 "Document empty - No top level array"
458 );
459 }
460
461 while cursor.node().kind() != TS_ARRAY_KIND {
462 anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
463 }
464
465 anyhow::ensure!(
466 cursor.goto_last_child(),
467 "Malformed JSON syntax tree, expected `]` at end of array"
468 );
469 debug_assert_eq!(cursor.node().kind(), "]");
470 let close_bracket_start = cursor.node().start_byte();
471 cursor.goto_previous_sibling();
472 while (cursor.node().is_extra() || cursor.node().is_missing()) && cursor.goto_previous_sibling()
473 {
474 }
475
476 let mut comma_range = None;
477 let mut prev_item_range = None;
478
479 if cursor.node().kind() == "," {
480 comma_range = Some(cursor.node().byte_range());
481 while cursor.goto_previous_sibling() && cursor.node().is_extra() {}
482
483 debug_assert_ne!(cursor.node().kind(), "[");
484 prev_item_range = Some(cursor.node().range());
485 } else {
486 while (cursor.node().is_extra() || cursor.node().is_missing())
487 && cursor.goto_previous_sibling()
488 {}
489 if cursor.node().kind() != "[" {
490 prev_item_range = Some(cursor.node().range());
491 }
492 }
493
494 let (mut replace_range, mut replace_value) =
495 replace_value_in_json_text("", &[], tab_size, Some(new_value), None);
496
497 replace_range.start = close_bracket_start;
498 replace_range.end = close_bracket_start;
499
500 let space = ' ';
501 if let Some(prev_item_range) = prev_item_range {
502 let needs_newline = prev_item_range.start_point.row > 0;
503 let indent_width = text[..prev_item_range.start_byte].rfind('\n').map_or(
504 prev_item_range.start_point.column,
505 |idx| {
506 prev_item_range.start_point.column
507 - text[idx + 1..prev_item_range.start_byte].trim_start().len()
508 },
509 );
510
511 let prev_item_end = comma_range
512 .as_ref()
513 .map_or(prev_item_range.end_byte, |range| range.end);
514 if text[prev_item_end..replace_range.start].trim().is_empty() {
515 replace_range.start = prev_item_end;
516 }
517
518 if needs_newline {
519 let increased_indent = format!("\n{space:width$}", width = indent_width);
520 replace_value = replace_value.replace('\n', &increased_indent);
521 replace_value.push('\n');
522 replace_value.insert_str(0, &format!("\n{space:width$}", width = indent_width));
523 } else {
524 while let Some(idx) = replace_value.find("\n ") {
525 replace_value.remove(idx + 1);
526 }
527 while let Some(idx) = replace_value.find('\n') {
528 replace_value.replace_range(idx..idx + 1, " ");
529 }
530 replace_value.insert(0, ' ');
531 }
532
533 if comma_range.is_none() {
534 replace_value.insert(0, ',');
535 }
536 } else {
537 if let Some(prev_newline) = text[..replace_range.start].rfind('\n') {
538 if text[prev_newline..replace_range.start].trim().is_empty() {
539 replace_range.start = prev_newline;
540 }
541 }
542 let indent = format!("\n{space:width$}", width = tab_size);
543 replace_value = replace_value.replace('\n', &indent);
544 replace_value.insert_str(0, &indent);
545 replace_value.push('\n');
546 }
    Ok((replace_range, replace_value))
548}
549
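/// Serializes `value` as pretty-printed JSON with `indent_size` spaces per indentation level,
/// then prefixes every line after the first with `indent_prefix_len` spaces so the result can
/// be spliced into text that is already indented.
///
/// For example (illustrative):
///
/// ```ignore
/// let text = to_pretty_json(&serde_json::json!({ "a": 1 }), 4, 4);
/// assert_eq!(text, "{\n        \"a\": 1\n    }");
/// ```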
550pub fn to_pretty_json(
551 value: &impl Serialize,
552 indent_size: usize,
553 indent_prefix_len: usize,
554) -> String {
555 const SPACES: [u8; 32] = [b' '; 32];
556
557 debug_assert!(indent_size <= SPACES.len());
558 debug_assert!(indent_prefix_len <= SPACES.len());
559
560 let mut output = Vec::new();
561 let mut ser = serde_json::Serializer::with_formatter(
562 &mut output,
563 serde_json::ser::PrettyFormatter::with_indent(&SPACES[0..indent_size.min(SPACES.len())]),
564 );
565
566 value.serialize(&mut ser).unwrap();
567 let text = String::from_utf8(output).unwrap();
568
569 let mut adjusted_text = String::new();
570 for (i, line) in text.split('\n').enumerate() {
571 if i > 0 {
572 adjusted_text.push_str(str::from_utf8(&SPACES[0..indent_prefix_len]).unwrap());
573 }
574 adjusted_text.push_str(line);
575 adjusted_text.push('\n');
576 }
577 adjusted_text.pop();
578 adjusted_text
579}
580
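/// Deserializes JSON that may contain comments and trailing commas (as tolerated by
/// `serde_json_lenient`) into a `T`.
///
/// For example (illustrative):
///
/// ```ignore
/// let value: serde_json::Value =
///     parse_json_with_comments(r#"{ /* comment */ "a": 1, }"#)?;
/// assert_eq!(value["a"], 1);
/// ```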
581pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
582 Ok(serde_json_lenient::from_str(content)?)
583}
584
585#[cfg(test)]
586mod tests {
587 use super::*;
588 use serde_json::{Value, json};
589 use unindent::Unindent;
590
591 #[test]
592 fn object_replace() {
593 #[track_caller]
594 fn check_object_replace(
595 input: String,
596 key_path: &[&str],
597 value: Option<Value>,
598 expected: String,
599 ) {
600 let result = replace_value_in_json_text(&input, key_path, 4, value.as_ref(), None);
601 let mut result_str = input.to_string();
602 result_str.replace_range(result.0, &result.1);
603 pretty_assertions::assert_eq!(expected, result_str);
604 }
605 check_object_replace(
606 r#"{
607 "a": 1,
608 "b": 2
609 }"#
610 .unindent(),
611 &["b"],
612 Some(json!(3)),
613 r#"{
614 "a": 1,
615 "b": 3
616 }"#
617 .unindent(),
618 );
619 check_object_replace(
620 r#"{
621 "a": 1,
622 "b": 2
623 }"#
624 .unindent(),
625 &["b"],
626 None,
627 r#"{
628 "a": 1
629 }"#
630 .unindent(),
631 );
632 check_object_replace(
633 r#"{
634 "a": 1,
635 "b": 2
636 }"#
637 .unindent(),
638 &["c"],
639 Some(json!(3)),
640 r#"{
641 "c": 3,
642 "a": 1,
643 "b": 2
644 }"#
645 .unindent(),
646 );
647 check_object_replace(
648 r#"{
649 "a": 1,
650 "b": {
651 "c": 2,
652 "d": 3,
653 }
654 }"#
655 .unindent(),
656 &["b", "c"],
657 Some(json!([1, 2, 3])),
658 r#"{
659 "a": 1,
660 "b": {
661 "c": [
662 1,
663 2,
664 3
665 ],
666 "d": 3,
667 }
668 }"#
669 .unindent(),
670 );
671
672 check_object_replace(
673 r#"{
674 "name": "old_name",
675 "id": 123
676 }"#
677 .unindent(),
678 &["name"],
679 Some(json!("new_name")),
680 r#"{
681 "name": "new_name",
682 "id": 123
683 }"#
684 .unindent(),
685 );
686
687 check_object_replace(
688 r#"{
689 "enabled": false,
690 "count": 5
691 }"#
692 .unindent(),
693 &["enabled"],
694 Some(json!(true)),
695 r#"{
696 "enabled": true,
697 "count": 5
698 }"#
699 .unindent(),
700 );
701
702 check_object_replace(
703 r#"{
704 "value": null,
705 "other": "test"
706 }"#
707 .unindent(),
708 &["value"],
709 Some(json!(42)),
710 r#"{
711 "value": 42,
712 "other": "test"
713 }"#
714 .unindent(),
715 );
716
717 check_object_replace(
718 r#"{
719 "config": {
720 "old": true
721 },
722 "name": "test"
723 }"#
724 .unindent(),
725 &["config"],
726 Some(json!({"new": false, "count": 3})),
727 r#"{
728 "config": {
729 "new": false,
730 "count": 3
731 },
732 "name": "test"
733 }"#
734 .unindent(),
735 );
736
737 check_object_replace(
738 r#"{
739 // This is a comment
740 "a": 1,
741 "b": 2 // Another comment
742 }"#
743 .unindent(),
744 &["b"],
745 Some(json!({"foo": "bar"})),
746 r#"{
747 // This is a comment
748 "a": 1,
749 "b": {
750 "foo": "bar"
751 } // Another comment
752 }"#
753 .unindent(),
754 );
755
756 check_object_replace(
757 r#"{}"#.to_string(),
758 &["new_key"],
759 Some(json!("value")),
760 r#"{
761 "new_key": "value"
762 }
763 "#
764 .unindent(),
765 );
766
767 check_object_replace(
768 r#"{
769 "only_key": 123
770 }"#
771 .unindent(),
772 &["only_key"],
773 None,
774 "{\n \n}".to_string(),
775 );
776
777 check_object_replace(
778 r#"{
779 "level1": {
780 "level2": {
781 "level3": {
782 "target": "old"
783 }
784 }
785 }
786 }"#
787 .unindent(),
788 &["level1", "level2", "level3", "target"],
789 Some(json!("new")),
790 r#"{
791 "level1": {
792 "level2": {
793 "level3": {
794 "target": "new"
795 }
796 }
797 }
798 }"#
799 .unindent(),
800 );
801
802 check_object_replace(
803 r#"{
804 "parent": {}
805 }"#
806 .unindent(),
807 &["parent", "child"],
808 Some(json!("value")),
809 r#"{
810 "parent": {
811 "child": "value"
812 }
813 }"#
814 .unindent(),
815 );
816
817 check_object_replace(
818 r#"{
819 "a": 1,
820 "b": 2,
821 }"#
822 .unindent(),
823 &["b"],
824 Some(json!(3)),
825 r#"{
826 "a": 1,
827 "b": 3,
828 }"#
829 .unindent(),
830 );
831
832 check_object_replace(
833 r#"{
834 "items": [1, 2, 3],
835 "count": 3
836 }"#
837 .unindent(),
838 &["items", "1"],
839 Some(json!(5)),
840 r#"{
841 "items": {
842 "1": 5
843 },
844 "count": 3
845 }"#
846 .unindent(),
847 );
848
849 check_object_replace(
850 r#"{
851 "items": [1, 2, 3],
852 "count": 3
853 }"#
854 .unindent(),
855 &["items", "1"],
856 None,
857 r#"{
858 "items": {
859 "1": null
860 },
861 "count": 3
862 }"#
863 .unindent(),
864 );
865
866 check_object_replace(
867 r#"{
868 "items": [1, 2, 3],
869 "count": 3
870 }"#
871 .unindent(),
872 &["items"],
873 Some(json!(["a", "b", "c", "d"])),
874 r#"{
875 "items": [
876 "a",
877 "b",
878 "c",
879 "d"
880 ],
881 "count": 3
882 }"#
883 .unindent(),
884 );
885
886 check_object_replace(
887 r#"{
888 "0": "zero",
889 "1": "one"
890 }"#
891 .unindent(),
892 &["1"],
893 Some(json!("ONE")),
894 r#"{
895 "0": "zero",
896 "1": "ONE"
897 }"#
898 .unindent(),
899 );
900 // Test with comments between object members
901 check_object_replace(
902 r#"{
903 "a": 1,
904 // Comment between members
905 "b": 2,
906 /* Block comment */
907 "c": 3
908 }"#
909 .unindent(),
910 &["b"],
911 Some(json!({"nested": true})),
912 r#"{
913 "a": 1,
914 // Comment between members
915 "b": {
916 "nested": true
917 },
918 /* Block comment */
919 "c": 3
920 }"#
921 .unindent(),
922 );
923
924 // Test with trailing comments on replaced value
925 check_object_replace(
926 r#"{
927 "a": 1, // keep this comment
928 "b": 2 // this should stay
929 }"#
930 .unindent(),
931 &["a"],
932 Some(json!("changed")),
933 r#"{
934 "a": "changed", // keep this comment
935 "b": 2 // this should stay
936 }"#
937 .unindent(),
938 );
939
940 // Test with deep indentation
941 check_object_replace(
942 r#"{
943 "deeply": {
944 "nested": {
945 "value": "old"
946 }
947 }
948 }"#
949 .unindent(),
950 &["deeply", "nested", "value"],
951 Some(json!("new")),
952 r#"{
953 "deeply": {
954 "nested": {
955 "value": "new"
956 }
957 }
958 }"#
959 .unindent(),
960 );
961
962 // Test removing value with comment preservation
963 check_object_replace(
964 r#"{
965 // Header comment
966 "a": 1,
967 // This comment belongs to b
968 "b": 2,
969 // This comment belongs to c
970 "c": 3
971 }"#
972 .unindent(),
973 &["b"],
974 None,
975 r#"{
976 // Header comment
977 "a": 1,
978 // This comment belongs to b
979 // This comment belongs to c
980 "c": 3
981 }"#
982 .unindent(),
983 );
984
985 // Test with multiline block comments
986 check_object_replace(
987 r#"{
988 /*
989 * This is a multiline
990 * block comment
991 */
992 "value": "old",
993 /* Another block */ "other": 123
994 }"#
995 .unindent(),
996 &["value"],
997 Some(json!("new")),
998 r#"{
999 /*
1000 * This is a multiline
1001 * block comment
1002 */
1003 "value": "new",
1004 /* Another block */ "other": 123
1005 }"#
1006 .unindent(),
1007 );
1008
1009 check_object_replace(
1010 r#"{
1011 // This object is empty
1012 }"#
1013 .unindent(),
1014 &["key"],
1015 Some(json!("value")),
1016 r#"{
1017 // This object is empty
1018 "key": "value"
1019 }
1020 "#
1021 .unindent(),
1022 );
1023
1024 // Test replacing in object with only comments
1025 check_object_replace(
1026 r#"{
1027 // Comment 1
1028 // Comment 2
1029 }"#
1030 .unindent(),
1031 &["new"],
1032 Some(json!(42)),
1033 r#"{
1034 // Comment 1
1035 // Comment 2
1036 "new": 42
1037 }
1038 "#
1039 .unindent(),
1040 );
1041
1042 // Test with inconsistent spacing
1043 check_object_replace(
1044 r#"{
1045 "a":1,
1046 "b" : 2 ,
1047 "c": 3
1048 }"#
1049 .unindent(),
1050 &["b"],
1051 Some(json!("spaced")),
1052 r#"{
1053 "a":1,
1054 "b" : "spaced" ,
1055 "c": 3
1056 }"#
1057 .unindent(),
1058 );
1059 }
1060
1061 #[test]
1062 fn array_replace() {
1063 #[track_caller]
1064 fn check_array_replace(
1065 input: impl ToString,
1066 index: usize,
1067 key_path: &[&str],
1068 value: Value,
1069 expected: impl ToString,
1070 ) {
1071 let input = input.to_string();
1072 let result = replace_top_level_array_value_in_json_text(
1073 &input,
1074 key_path,
1075 Some(&value),
1076 None,
1077 index,
1078 4,
1079 )
1080 .expect("replace succeeded");
1081 let mut result_str = input;
1082 result_str.replace_range(result.0, &result.1);
1083 pretty_assertions::assert_eq!(expected.to_string(), result_str);
1084 }
1085
1086 check_array_replace(r#"[1, 3, 3]"#, 1, &[], json!(2), r#"[1, 2, 3]"#);
1087 check_array_replace(r#"[1, 3, 3]"#, 2, &[], json!(2), r#"[1, 3, 2]"#);
1088 check_array_replace(r#"[1, 3, 3,]"#, 3, &[], json!(2), r#"[1, 3, 3, 2]"#);
1089 check_array_replace(r#"[1, 3, 3,]"#, 100, &[], json!(2), r#"[1, 3, 3, 2]"#);
1090 check_array_replace(
1091 r#"[
1092 1,
1093 2,
1094 3,
1095 ]"#
1096 .unindent(),
1097 1,
1098 &[],
1099 json!({"foo": "bar", "baz": "qux"}),
1100 r#"[
1101 1,
1102 {
1103 "foo": "bar",
1104 "baz": "qux"
1105 },
1106 3,
1107 ]"#
1108 .unindent(),
1109 );
1110 check_array_replace(
1111 r#"[1, 3, 3,]"#,
1112 1,
1113 &[],
1114 json!({"foo": "bar", "baz": "qux"}),
1115 r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
1116 );
1117
1118 check_array_replace(
1119 r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
1120 1,
1121 &["baz"],
1122 json!({"qux": "quz"}),
1123 r#"[1, { "foo": "bar", "baz": { "qux": "quz" } }, 3,]"#,
1124 );
1125
1126 check_array_replace(
1127 r#"[
1128 1,
1129 {
1130 "foo": "bar",
1131 "baz": "qux"
1132 },
1133 3
1134 ]"#,
1135 1,
1136 &["baz"],
1137 json!({"qux": "quz"}),
1138 r#"[
1139 1,
1140 {
1141 "foo": "bar",
1142 "baz": {
1143 "qux": "quz"
1144 }
1145 },
1146 3
1147 ]"#,
1148 );
1149
1150 check_array_replace(
1151 r#"[
1152 1,
1153 {
1154 "foo": "bar",
1155 "baz": {
1156 "qux": "quz"
1157 }
1158 },
1159 3
1160 ]"#,
1161 1,
1162 &["baz"],
1163 json!("qux"),
1164 r#"[
1165 1,
1166 {
1167 "foo": "bar",
1168 "baz": "qux"
1169 },
1170 3
1171 ]"#,
1172 );
1173
1174 check_array_replace(
1175 r#"[
1176 1,
1177 {
1178 "foo": "bar",
1179 // some comment to keep
1180 "baz": {
1181 // some comment to remove
1182 "qux": "quz"
1183 }
1184 // some other comment to keep
1185 },
1186 3
1187 ]"#,
1188 1,
1189 &["baz"],
1190 json!("qux"),
1191 r#"[
1192 1,
1193 {
1194 "foo": "bar",
1195 // some comment to keep
1196 "baz": "qux"
1197 // some other comment to keep
1198 },
1199 3
1200 ]"#,
1201 );
1202
1203 // Test with comments between array elements
1204 check_array_replace(
1205 r#"[
1206 1,
1207 // This is element 2
1208 2,
1209 /* Block comment */ 3,
1210 4 // Trailing comment
1211 ]"#,
1212 2,
1213 &[],
1214 json!("replaced"),
1215 r#"[
1216 1,
1217 // This is element 2
1218 2,
1219 /* Block comment */ "replaced",
1220 4 // Trailing comment
1221 ]"#,
1222 );
1223
1224 // Test empty array with comments
1225 check_array_replace(
1226 r#"[
1227 // Empty array with comment
1228 ]"#
1229 .unindent(),
1230 0,
1231 &[],
1232 json!("first"),
1233 r#"[
1234 // Empty array with comment
1235 "first"
1236 ]"#
1237 .unindent(),
1238 );
1239 check_array_replace(
1240 r#"[]"#.unindent(),
1241 0,
1242 &[],
1243 json!("first"),
1244 r#"[
1245 "first"
1246 ]"#
1247 .unindent(),
1248 );
1249
1250 // Test array with leading comments
1251 check_array_replace(
1252 r#"[
1253 // Leading comment
1254 // Another leading comment
1255 1,
1256 2
1257 ]"#,
1258 0,
1259 &[],
1260 json!({"new": "object"}),
1261 r#"[
1262 // Leading comment
1263 // Another leading comment
1264 {
1265 "new": "object"
1266 },
1267 2
1268 ]"#,
1269 );
1270
1271 // Test with deep indentation
1272 check_array_replace(
1273 r#"[
1274 1,
1275 2,
1276 3
1277 ]"#,
1278 1,
1279 &[],
1280 json!("deep"),
1281 r#"[
1282 1,
1283 "deep",
1284 3
1285 ]"#,
1286 );
1287
1288 // Test with mixed spacing
1289 check_array_replace(
1290 r#"[1,2, 3, 4]"#,
1291 2,
1292 &[],
1293 json!("spaced"),
1294 r#"[1,2, "spaced", 4]"#,
1295 );
1296
1297 // Test replacing nested array element
1298 check_array_replace(
1299 r#"[
1300 [1, 2, 3],
1301 [4, 5, 6],
1302 [7, 8, 9]
1303 ]"#,
1304 1,
1305 &[],
1306 json!(["a", "b", "c", "d"]),
1307 r#"[
1308 [1, 2, 3],
1309 [
1310 "a",
1311 "b",
1312 "c",
1313 "d"
1314 ],
1315 [7, 8, 9]
1316 ]"#,
1317 );
1318
1319 // Test with multiline block comments
1320 check_array_replace(
1321 r#"[
1322 /*
1323 * This is a
1324 * multiline comment
1325 */
1326 "first",
1327 "second"
1328 ]"#,
1329 0,
1330 &[],
1331 json!("updated"),
1332 r#"[
1333 /*
1334 * This is a
1335 * multiline comment
1336 */
1337 "updated",
1338 "second"
1339 ]"#,
1340 );
1341
1342 // Test replacing with null
1343 check_array_replace(
1344 r#"[true, false, true]"#,
1345 1,
1346 &[],
1347 json!(null),
1348 r#"[true, null, true]"#,
1349 );
1350
1351 // Test single element array
1352 check_array_replace(
1353 r#"[42]"#,
1354 0,
1355 &[],
1356 json!({"answer": 42}),
1357 r#"[{ "answer": 42 }]"#,
1358 );
1359
1360 // Test array with only comments
1361 check_array_replace(
1362 r#"[
1363 // Comment 1
1364 // Comment 2
1365 // Comment 3
1366 ]"#
1367 .unindent(),
1368 10,
1369 &[],
1370 json!(123),
1371 r#"[
1372 // Comment 1
1373 // Comment 2
1374 // Comment 3
1375 123
1376 ]"#
1377 .unindent(),
1378 );
1379 }
1380
1381 #[test]
1382 fn array_append() {
1383 #[track_caller]
1384 fn check_array_append(input: impl ToString, value: Value, expected: impl ToString) {
1385 let input = input.to_string();
1386 let result = append_top_level_array_value_in_json_text(&input, &value, 4)
1387 .expect("append succeeded");
1388 let mut result_str = input;
1389 result_str.replace_range(result.0, &result.1);
1390 pretty_assertions::assert_eq!(expected.to_string(), result_str);
1391 }
1392 check_array_append(r#"[1, 3, 3]"#, json!(4), r#"[1, 3, 3, 4]"#);
1393 check_array_append(r#"[1, 3, 3,]"#, json!(4), r#"[1, 3, 3, 4]"#);
1394 check_array_append(r#"[1, 3, 3 ]"#, json!(4), r#"[1, 3, 3, 4]"#);
1395 check_array_append(r#"[1, 3, 3, ]"#, json!(4), r#"[1, 3, 3, 4]"#);
1396 check_array_append(
1397 r#"[
1398 1,
1399 2,
1400 3
1401 ]"#
1402 .unindent(),
1403 json!(4),
1404 r#"[
1405 1,
1406 2,
1407 3,
1408 4
1409 ]"#
1410 .unindent(),
1411 );
1412 check_array_append(
1413 r#"[
1414 1,
1415 2,
1416 3,
1417 ]"#
1418 .unindent(),
1419 json!(4),
1420 r#"[
1421 1,
1422 2,
1423 3,
1424 4
1425 ]"#
1426 .unindent(),
1427 );
1428 check_array_append(
1429 r#"[
1430 1,
1431 2,
1432 3,
1433 ]"#
1434 .unindent(),
1435 json!({"foo": "bar", "baz": "qux"}),
1436 r#"[
1437 1,
1438 2,
1439 3,
1440 {
1441 "foo": "bar",
1442 "baz": "qux"
1443 }
1444 ]"#
1445 .unindent(),
1446 );
1447 check_array_append(
1448 r#"[ 1, 2, 3, ]"#.unindent(),
1449 json!({"foo": "bar", "baz": "qux"}),
1450 r#"[ 1, 2, 3, { "foo": "bar", "baz": "qux" }]"#.unindent(),
1451 );
1452 check_array_append(
1453 r#"[]"#,
1454 json!({"foo": "bar"}),
1455 r#"[
1456 {
1457 "foo": "bar"
1458 }
1459 ]"#
1460 .unindent(),
1461 );
1462
1463 // Test with comments between array elements
1464 check_array_append(
1465 r#"[
1466 1,
1467 // Comment between elements
1468 2,
1469 /* Block comment */ 3
1470 ]"#
1471 .unindent(),
1472 json!(4),
1473 r#"[
1474 1,
1475 // Comment between elements
1476 2,
1477 /* Block comment */ 3,
1478 4
1479 ]"#
1480 .unindent(),
1481 );
1482
1483 // Test with trailing comment on last element
1484 check_array_append(
1485 r#"[
1486 1,
1487 2,
1488 3 // Trailing comment
1489 ]"#
1490 .unindent(),
1491 json!("new"),
1492 r#"[
1493 1,
1494 2,
1495 3 // Trailing comment
1496 ,
1497 "new"
1498 ]"#
1499 .unindent(),
1500 );
1501
1502 // Test empty array with comments
1503 check_array_append(
1504 r#"[
1505 // Empty array with comment
1506 ]"#
1507 .unindent(),
1508 json!("first"),
1509 r#"[
1510 // Empty array with comment
1511 "first"
1512 ]"#
1513 .unindent(),
1514 );
1515
1516 // Test with multiline block comment at end
1517 check_array_append(
1518 r#"[
1519 1,
1520 2
1521 /*
1522 * This is a
1523 * multiline comment
1524 */
1525 ]"#
1526 .unindent(),
1527 json!(3),
1528 r#"[
1529 1,
1530 2
1531 /*
1532 * This is a
1533 * multiline comment
1534 */
1535 ,
1536 3
1537 ]"#
1538 .unindent(),
1539 );
1540
1541 // Test with deep indentation
1542 check_array_append(
1543 r#"[
1544 1,
1545 2,
1546 3
1547 ]"#
1548 .unindent(),
1549 json!("deep"),
1550 r#"[
1551 1,
1552 2,
1553 3,
1554 "deep"
1555 ]"#
1556 .unindent(),
1557 );
1558
1559 // Test with no spacing
1560 check_array_append(r#"[1,2,3]"#, json!(4), r#"[1,2,3, 4]"#);
1561
1562 // Test appending complex nested structure
1563 check_array_append(
1564 r#"[
1565 {"a": 1},
1566 {"b": 2}
1567 ]"#
1568 .unindent(),
1569 json!({"c": {"nested": [1, 2, 3]}}),
1570 r#"[
1571 {"a": 1},
1572 {"b": 2},
1573 {
1574 "c": {
1575 "nested": [
1576 1,
1577 2,
1578 3
1579 ]
1580 }
1581 }
1582 ]"#
1583 .unindent(),
1584 );
1585
1586 // Test array ending with comment after bracket
1587 check_array_append(
1588 r#"[
1589 1,
1590 2,
1591 3
1592 ] // Comment after array"#
1593 .unindent(),
1594 json!(4),
1595 r#"[
1596 1,
1597 2,
1598 3,
1599 4
1600 ] // Comment after array"#
1601 .unindent(),
1602 );
1603
1604 // Test with inconsistent element formatting
1605 check_array_append(
1606 r#"[1,
1607 2,
1608 3,
1609 ]"#
1610 .unindent(),
1611 json!(4),
1612 r#"[1,
1613 2,
1614 3,
1615 4
1616 ]"#
1617 .unindent(),
1618 );
1619
1620 // Test appending to single-line array with trailing comma
1621 check_array_append(
1622 r#"[1, 2, 3,]"#,
1623 json!({"key": "value"}),
1624 r#"[1, 2, 3, { "key": "value" }]"#,
1625 );
1626
1627 // Test appending null value
1628 check_array_append(r#"[true, false]"#, json!(null), r#"[true, false, null]"#);
1629
1630 // Test appending to array with only comments
1631 check_array_append(
1632 r#"[
1633 // Just comments here
1634 // More comments
1635 ]"#
1636 .unindent(),
1637 json!(42),
1638 r#"[
1639 // Just comments here
1640 // More comments
1641 42
1642 ]"#
1643 .unindent(),
1644 );
1645 }
1646}