use anyhow::Result;
use gpui::App;
use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value;
use std::{ops::Range, sync::LazyLock};
use tree_sitter::{Query, StreamingIterator as _};
use util::RangeExt;

/// Parameters that are used when generating some JSON schemas at runtime.
pub struct SettingsJsonSchemaParams<'a> {
    pub language_names: &'a [String],
    pub font_names: &'a [String],
}

/// A registered value that specifies JSON schemas which are generated at runtime.
pub struct ParameterizedJsonSchema {
    pub add_and_get_ref:
        fn(&mut schemars::SchemaGenerator, &SettingsJsonSchemaParams, &App) -> schemars::Schema,
}

inventory::collect!(ParameterizedJsonSchema);

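/// Computes edits that transform the JSON in `text` from `old_value` to `new_value`,
/// recursing into objects so that the comments and formatting of unchanged parts are
/// preserved. Keys listed in `preserved_keys` are rewritten even when their value is
/// unchanged. Each edit is applied to `text` as it is produced and also appended to `edits`.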
pub fn update_value_in_json_text<'a>(
    text: &mut String,
    key_path: &mut Vec<&'a str>,
    tab_size: usize,
    old_value: &'a Value,
    new_value: &'a Value,
    preserved_keys: &[&str],
    edits: &mut Vec<(Range<usize>, String)>,
) {
    // If the old and new values are both objects, then compare them key by key,
    // preserving the comments and formatting of the unchanged parts. Otherwise,
    // replace the old value with the new value.
    if let (Value::Object(old_object), Value::Object(new_object)) = (old_value, new_value) {
        for (key, old_sub_value) in old_object.iter() {
            key_path.push(key);
            if let Some(new_sub_value) = new_object.get(key) {
                // Key exists in both old and new, recursively update
                update_value_in_json_text(
                    text,
                    key_path,
                    tab_size,
                    old_sub_value,
                    new_sub_value,
                    preserved_keys,
                    edits,
                );
            } else {
                // Key was removed from new object, remove the entire key-value pair
                let (range, replacement) =
                    replace_value_in_json_text(text, key_path, 0, None, None);
                text.replace_range(range.clone(), &replacement);
                edits.push((range, replacement));
            }
            key_path.pop();
        }
        for (key, new_sub_value) in new_object.iter() {
            key_path.push(key);
            if !old_object.contains_key(key) {
                update_value_in_json_text(
                    text,
                    key_path,
                    tab_size,
                    &Value::Null,
                    new_sub_value,
                    preserved_keys,
                    edits,
                );
            }
            key_path.pop();
        }
    } else if key_path
        .last()
        .is_some_and(|key| preserved_keys.contains(key))
        || old_value != new_value
    {
        let mut new_value = new_value.clone();
        if let Some(new_object) = new_value.as_object_mut() {
            new_object.retain(|_, v| !v.is_null());
        }
        let (range, replacement) =
            replace_value_in_json_text(text, key_path, tab_size, Some(&new_value), None);
        text.replace_range(range.clone(), &replacement);
        edits.push((range, replacement));
    }
}

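/// Computes a single edit that sets the value at `key_path` in `text` to `new_value`,
/// returning the byte range to replace and its replacement text. Missing parts of the key
/// path are created as nested objects, and passing `None` for `new_value` removes the
/// matched key-value pair instead.
///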
/// * `replace_key` - When an exact match for `key_path` is found, also replace the matched key with `replace_key` if it is `Some`.
pub fn replace_value_in_json_text<T: AsRef<str>>(
    text: &str,
    key_path: &[T],
    tab_size: usize,
    new_value: Option<&Value>,
    replace_key: Option<&str>,
) -> (Range<usize>, String) {
    static PAIR_QUERY: LazyLock<Query> = LazyLock::new(|| {
        Query::new(
            &tree_sitter_json::LANGUAGE.into(),
            "(pair key: (string) @key value: (_) @value)",
        )
        .expect("Failed to create PAIR_QUERY")
    });

    let mut parser = tree_sitter::Parser::new();
    parser
        .set_language(&tree_sitter_json::LANGUAGE.into())
        .unwrap();
    let syntax_tree = parser.parse(text, None).unwrap();

    let mut cursor = tree_sitter::QueryCursor::new();

    let mut depth = 0;
    let mut last_value_range = 0..0;
    let mut first_key_start = None;
    let mut existing_value_range = 0..text.len();

    let mut matches = cursor.matches(&PAIR_QUERY, syntax_tree.root_node(), text.as_bytes());
    while let Some(mat) = matches.next() {
        if mat.captures.len() != 2 {
            continue;
        }

        let key_range = mat.captures[0].node.byte_range();
        let value_range = mat.captures[1].node.byte_range();

        // Don't enter sub objects until we find an exact
        // match for the current keypath
        if last_value_range.contains_inclusive(&value_range) {
            continue;
        }

        last_value_range = value_range.clone();

        if key_range.start > existing_value_range.end {
            break;
        }

        first_key_start.get_or_insert(key_range.start);

        let found_key = text
            .get(key_range.clone())
            .and_then(|key_text| {
                serde_json::to_string(key_path[depth].as_ref())
                    .ok()
                    .map(|encoded_key| depth < key_path.len() && key_text == encoded_key)
            })
            .unwrap_or(false);

        if found_key {
            existing_value_range = value_range;
            // Reset last value range when increasing in depth
            last_value_range = existing_value_range.start..existing_value_range.start;
            depth += 1;

            if depth == key_path.len() {
                break;
            }

            first_key_start = None;
        }
    }

    // We found the exact key we want
    if depth == key_path.len() {
        if let Some(new_value) = new_value {
            let new_val = to_pretty_json(new_value, tab_size, tab_size * depth);
            if let Some(replace_key) = replace_key.and_then(|str| serde_json::to_string(str).ok()) {
                let new_key = format!("{}: ", replace_key);
                if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
                    if let Some(prev_key_start) = text[..key_start].rfind('"') {
                        existing_value_range.start = prev_key_start;
                    } else {
                        existing_value_range.start = key_start;
                    }
                }
                (existing_value_range, new_key + &new_val)
            } else {
                (existing_value_range, new_val)
            }
        } else {
            let mut removal_start = first_key_start.unwrap_or(existing_value_range.start);
            let mut removal_end = existing_value_range.end;

            // Find the actual key position by looking for the key in the pair
            // We need to extend the range to include the key, not just the value
            if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
                if let Some(prev_key_start) = text[..key_start].rfind('"') {
                    removal_start = prev_key_start;
                } else {
                    removal_start = key_start;
                }
            }

            let mut removed_comma = false;
            // Look backward for a preceding comma first
            let preceding_text = text.get(0..removal_start).unwrap_or("");
            if let Some(comma_pos) = preceding_text.rfind(',') {
                // Check if there are only whitespace characters between the comma and our key
                let between_comma_and_key = text.get(comma_pos + 1..removal_start).unwrap_or("");
                if between_comma_and_key.trim().is_empty() {
                    removal_start = comma_pos;
                    removed_comma = true;
                }
            }
            if let Some(remaining_text) = text.get(existing_value_range.end..)
                && !removed_comma
            {
                let mut chars = remaining_text.char_indices();
                while let Some((offset, ch)) = chars.next() {
                    if ch == ',' {
                        removal_end = existing_value_range.end + offset + 1;
                        // Also consume whitespace after the comma
                        for (_, next_ch) in chars.by_ref() {
                            if next_ch.is_whitespace() {
                                removal_end += next_ch.len_utf8();
                            } else {
                                break;
                            }
                        }
                        break;
                    } else if !ch.is_whitespace() {
                        break;
                    }
                }
            }
            (removal_start..removal_end, String::new())
        }
    } else {
        // The key path was not fully matched; construct the missing nested
        // objects around the new value.
        let new_key = key_path[depth].as_ref();

        let mut new_value =
            serde_json::to_value(new_value.unwrap_or(&serde_json::Value::Null)).unwrap();
        for key in key_path[(depth + 1)..].iter().rev() {
            new_value = serde_json::json!({ key.as_ref().to_string(): new_value });
        }

        if let Some(first_key_start) = first_key_start {
            let mut row = 0;
            let mut column = 0;
            for (ix, char) in text.char_indices() {
                if ix == first_key_start {
                    break;
                }
                if char == '\n' {
                    row += 1;
                    column = 0;
                } else {
                    column += char.len_utf8();
                }
            }

            if row > 0 {
                // depth is 0 based, but division needs to be 1 based.
                let new_val = to_pretty_json(&new_value, column / (depth + 1), column);
                let space = ' ';
                let content = format!("\"{new_key}\": {new_val},\n{space:width$}", width = column);
                (first_key_start..first_key_start, content)
            } else {
                let new_val = serde_json::to_string(&new_value).unwrap();
                let mut content = format!(r#""{new_key}": {new_val},"#);
                content.push(' ');
                (first_key_start..first_key_start, content)
            }
        } else {
            new_value = serde_json::json!({ new_key.to_string(): new_value });
            let indent_prefix_len = 4 * depth;
            let mut new_val = to_pretty_json(&new_value, 4, indent_prefix_len);
            if depth == 0 {
                new_val.push('\n');
            }
            // Best-effort attempt to keep any comments from the replaced text, along with their indentation
            let mut replace_text = &text[existing_value_range.clone()];
            while let Some(comment_start) = replace_text.rfind("//") {
                if let Some(comment_end) = replace_text[comment_start..].find('\n') {
                    let mut comment_with_indent_start = replace_text[..comment_start]
                        .rfind('\n')
                        .unwrap_or(comment_start);
                    if !replace_text[comment_with_indent_start..comment_start]
                        .trim()
                        .is_empty()
                    {
                        comment_with_indent_start = comment_start;
                    }
                    new_val.insert_str(
                        1,
                        &replace_text[comment_with_indent_start..comment_start + comment_end],
                    );
                }
                replace_text = &replace_text[..comment_start];
            }

            (existing_value_range, new_val)
        }
    }
}

const TS_DOCUMENT_KIND: &str = "document";
const TS_ARRAY_KIND: &str = "array";
const TS_COMMENT_KIND: &str = "comment";

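/// Like [`replace_value_in_json_text`], but for a document whose top level is an array:
/// `array_index` selects the element to edit and `key_path` addresses a value inside that
/// element. If `array_index` is past the end of the array, `new_value` is appended; passing
/// `None` for `new_value` with an empty `key_path` removes the element instead.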
pub fn replace_top_level_array_value_in_json_text(
    text: &str,
    key_path: &[&str],
    new_value: Option<&Value>,
    replace_key: Option<&str>,
    array_index: usize,
    tab_size: usize,
) -> Result<(Range<usize>, String)> {
    let mut parser = tree_sitter::Parser::new();
    parser
        .set_language(&tree_sitter_json::LANGUAGE.into())
        .unwrap();
    let syntax_tree = parser.parse(text, None).unwrap();

    let mut cursor = syntax_tree.walk();

    if cursor.node().kind() == TS_DOCUMENT_KIND {
        anyhow::ensure!(
            cursor.goto_first_child(),
            "Document empty - No top level array"
        );
    }

    while cursor.node().kind() != TS_ARRAY_KIND {
        anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
    }

    // `goto_first_child` returns false only if the array node has no children;
    // a parsed array always has at least its brackets.
    cursor.goto_first_child();
    debug_assert_eq!(cursor.node().kind(), "[");

    let mut index = 0;

    while index <= array_index {
        let node = cursor.node();
        if !matches!(node.kind(), "[" | "]" | TS_COMMENT_KIND | ",")
            && !node.is_extra()
            && !node.is_missing()
        {
            if index == array_index {
                break;
            }
            index += 1;
        }
        if !cursor.goto_next_sibling() {
            if let Some(new_value) = new_value {
                return append_top_level_array_value_in_json_text(text, new_value, tab_size);
            } else {
                return Ok((0..0, String::new()));
            }
        }
    }

    let range = cursor.node().range();
    let indent_width = range.start_point.column;
    let offset = range.start_byte;
    let text_range = range.start_byte..range.end_byte;
    let value_str = &text[text_range.clone()];
    let needs_indent = range.start_point.row > 0;

    if new_value.is_none() && key_path.is_empty() {
        let mut remove_range = text_range;
        if index == 0 {
            while cursor.goto_next_sibling()
                && (cursor.node().is_extra() || cursor.node().is_missing())
            {}
            if cursor.node().kind() == "," {
                remove_range.end = cursor.node().range().end_byte;
            }
            if let Some(next_newline) = text[remove_range.end + 1..].find('\n')
                && text[remove_range.end + 1..remove_range.end + next_newline]
                    .chars()
                    .all(|c| c.is_ascii_whitespace())
            {
                remove_range.end += next_newline;
            }
        } else {
            while cursor.goto_previous_sibling()
                && (cursor.node().is_extra() || cursor.node().is_missing())
            {}
            if cursor.node().kind() == "," {
                remove_range.start = cursor.node().range().start_byte;
            }
        }
        Ok((remove_range, String::new()))
    } else {
        let (mut replace_range, mut replace_value) =
            replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);

        replace_range.start += offset;
        replace_range.end += offset;

        if needs_indent {
            let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
            replace_value = replace_value.replace('\n', &increased_indent);
        } else {
            while let Some(idx) = replace_value.find("\n ") {
                replace_value.remove(idx + 1);
            }
            while let Some(idx) = replace_value.find('\n') {
                replace_value.replace_range(idx..idx + 1, " ");
            }
        }

        Ok((replace_range, replace_value))
    }
}

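/// Computes an edit that appends `new_value` as the last element of the top-level array in
/// `text`. A separating comma is added unless the array already ends with a trailing comma,
/// and the new element is indented to match the previous element (or by `tab_size` when the
/// array is empty).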
pub fn append_top_level_array_value_in_json_text(
    text: &str,
    new_value: &Value,
    tab_size: usize,
) -> Result<(Range<usize>, String)> {
    let mut parser = tree_sitter::Parser::new();
    parser
        .set_language(&tree_sitter_json::LANGUAGE.into())
        .unwrap();
    let syntax_tree = parser.parse(text, None).unwrap();

    let mut cursor = syntax_tree.walk();

    if cursor.node().kind() == TS_DOCUMENT_KIND {
        anyhow::ensure!(
            cursor.goto_first_child(),
            "Document empty - No top level array"
        );
    }

    while cursor.node().kind() != TS_ARRAY_KIND {
        anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
    }

    anyhow::ensure!(
        cursor.goto_last_child(),
        "Malformed JSON syntax tree, expected `]` at end of array"
    );
    debug_assert_eq!(cursor.node().kind(), "]");
    let close_bracket_start = cursor.node().start_byte();
    while cursor.goto_previous_sibling()
        && (cursor.node().is_extra() || cursor.node().is_missing())
        && !cursor.node().is_error()
    {}

    let mut comma_range = None;
    let mut prev_item_range = None;

    if cursor.node().kind() == "," || is_error_of_kind(&mut cursor, ",") {
        comma_range = Some(cursor.node().byte_range());
        while cursor.goto_previous_sibling()
            && (cursor.node().is_extra() || cursor.node().is_missing())
        {}

        debug_assert_ne!(cursor.node().kind(), "[");
        prev_item_range = Some(cursor.node().range());
    } else {
        while (cursor.node().is_extra() || cursor.node().is_missing())
            && cursor.goto_previous_sibling()
        {}
        if cursor.node().kind() != "[" {
            prev_item_range = Some(cursor.node().range());
        }
    }

    let (mut replace_range, mut replace_value) =
        replace_value_in_json_text::<&str>("", &[], tab_size, Some(new_value), None);

    replace_range.start = close_bracket_start;
    replace_range.end = close_bracket_start;

    let space = ' ';
    if let Some(prev_item_range) = prev_item_range {
        let needs_newline = prev_item_range.start_point.row > 0;
        let indent_width = text[..prev_item_range.start_byte].rfind('\n').map_or(
            prev_item_range.start_point.column,
            |idx| {
                prev_item_range.start_point.column
                    - text[idx + 1..prev_item_range.start_byte].trim_start().len()
            },
        );

        let prev_item_end = comma_range
            .as_ref()
            .map_or(prev_item_range.end_byte, |range| range.end);
        if text[prev_item_end..replace_range.start].trim().is_empty() {
            replace_range.start = prev_item_end;
        }

        if needs_newline {
            let increased_indent = format!("\n{space:width$}", width = indent_width);
            replace_value = replace_value.replace('\n', &increased_indent);
            replace_value.push('\n');
            replace_value.insert_str(0, &format!("\n{space:width$}", width = indent_width));
        } else {
            while let Some(idx) = replace_value.find("\n ") {
                replace_value.remove(idx + 1);
            }
            while let Some(idx) = replace_value.find('\n') {
                replace_value.replace_range(idx..idx + 1, " ");
            }
            replace_value.insert(0, ' ');
        }

        if comma_range.is_none() {
            replace_value.insert(0, ',');
        }
    } else {
        if let Some(prev_newline) = text[..replace_range.start].rfind('\n')
            && text[prev_newline..replace_range.start].trim().is_empty()
        {
            replace_range.start = prev_newline;
        }
        let indent = format!("\n{space:width$}", width = tab_size);
        replace_value = replace_value.replace('\n', &indent);
        replace_value.insert_str(0, &indent);
        replace_value.push('\n');
    }
    return Ok((replace_range, replace_value));

    fn is_error_of_kind(cursor: &mut tree_sitter::TreeCursor<'_>, kind: &str) -> bool {
        if cursor.node().kind() != "ERROR" {
            return false;
        }

        let descendant_index = cursor.descendant_index();
        let res = cursor.goto_first_child() && cursor.node().kind() == kind;
        cursor.goto_descendant(descendant_index);
        res
    }
}

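/// Serializes `value` as pretty-printed JSON, indenting nested lines by `indent_size` spaces
/// and prefixing every line after the first with `indent_prefix_len` spaces, so that the
/// result can be spliced into existing text at an arbitrary column.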
pub fn to_pretty_json(
    value: &impl Serialize,
    indent_size: usize,
    indent_prefix_len: usize,
) -> String {
    const SPACES: [u8; 32] = [b' '; 32];

    debug_assert!(indent_size <= SPACES.len());
    debug_assert!(indent_prefix_len <= SPACES.len());

    let mut output = Vec::new();
    let mut ser = serde_json::Serializer::with_formatter(
        &mut output,
        serde_json::ser::PrettyFormatter::with_indent(&SPACES[0..indent_size.min(SPACES.len())]),
    );

    value.serialize(&mut ser).unwrap();
    let text = String::from_utf8(output).unwrap();

    let mut adjusted_text = String::new();
    for (i, line) in text.split('\n').enumerate() {
        if i > 0 {
            adjusted_text.push_str(str::from_utf8(&SPACES[0..indent_prefix_len]).unwrap());
        }
        adjusted_text.push_str(line);
        adjusted_text.push('\n');
    }
    adjusted_text.pop();
    adjusted_text
}

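/// Parses JSON that may contain comments and other lenient extensions accepted by
/// `serde_json_lenient`, reporting the path to the offending value on failure.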
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
    let mut deserializer = serde_json_lenient::Deserializer::from_str(content);
    Ok(serde_path_to_error::deserialize(&mut deserializer)?)
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::{Value, json};
    use unindent::Unindent;

    #[test]
    fn object_replace() {
        #[track_caller]
        fn check_object_replace(
            input: String,
            key_path: &[&str],
            value: Option<Value>,
            expected: String,
        ) {
            let result = replace_value_in_json_text(&input, key_path, 4, value.as_ref(), None);
            let mut result_str = input;
            result_str.replace_range(result.0, &result.1);
            pretty_assertions::assert_eq!(expected, result_str);
        }
        check_object_replace(
            r#"{
                "a": 1,
                "b": 2
            }"#
            .unindent(),
            &["b"],
            Some(json!(3)),
            r#"{
                "a": 1,
                "b": 3
            }"#
            .unindent(),
        );
        check_object_replace(
            r#"{
                "a": 1,
                "b": 2
            }"#
            .unindent(),
            &["b"],
            None,
            r#"{
                "a": 1
            }"#
            .unindent(),
        );
        check_object_replace(
            r#"{
                "a": 1,
                "b": 2
            }"#
            .unindent(),
            &["c"],
            Some(json!(3)),
            r#"{
                "c": 3,
                "a": 1,
                "b": 2
            }"#
            .unindent(),
        );
        check_object_replace(
            r#"{
                "a": 1,
                "b": {
                    "c": 2,
                    "d": 3,
                }
            }"#
            .unindent(),
            &["b", "c"],
            Some(json!([1, 2, 3])),
            r#"{
                "a": 1,
                "b": {
                    "c": [
                        1,
                        2,
                        3
                    ],
                    "d": 3,
                }
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "name": "old_name",
                "id": 123
            }"#
            .unindent(),
            &["name"],
            Some(json!("new_name")),
            r#"{
                "name": "new_name",
                "id": 123
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "enabled": false,
                "count": 5
            }"#
            .unindent(),
            &["enabled"],
            Some(json!(true)),
            r#"{
                "enabled": true,
                "count": 5
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "value": null,
                "other": "test"
            }"#
            .unindent(),
            &["value"],
            Some(json!(42)),
            r#"{
                "value": 42,
                "other": "test"
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "config": {
                    "old": true
                },
                "name": "test"
            }"#
            .unindent(),
            &["config"],
            Some(json!({"new": false, "count": 3})),
            r#"{
                "config": {
                    "new": false,
                    "count": 3
                },
                "name": "test"
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                // This is a comment
                "a": 1,
                "b": 2 // Another comment
            }"#
            .unindent(),
            &["b"],
            Some(json!({"foo": "bar"})),
            r#"{
                // This is a comment
                "a": 1,
                "b": {
                    "foo": "bar"
                } // Another comment
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{}"#.to_string(),
            &["new_key"],
            Some(json!("value")),
            r#"{
                "new_key": "value"
            }
            "#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "only_key": 123
            }"#
            .unindent(),
            &["only_key"],
            None,
            "{\n    \n}".to_string(),
        );

        check_object_replace(
            r#"{
                "level1": {
                    "level2": {
                        "level3": {
                            "target": "old"
                        }
                    }
                }
            }"#
            .unindent(),
            &["level1", "level2", "level3", "target"],
            Some(json!("new")),
            r#"{
                "level1": {
                    "level2": {
                        "level3": {
                            "target": "new"
                        }
                    }
                }
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "parent": {}
            }"#
            .unindent(),
            &["parent", "child"],
            Some(json!("value")),
            r#"{
                "parent": {
                    "child": "value"
                }
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "a": 1,
                "b": 2,
            }"#
            .unindent(),
            &["b"],
            Some(json!(3)),
            r#"{
                "a": 1,
                "b": 3,
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "items": [1, 2, 3],
                "count": 3
            }"#
            .unindent(),
            &["items", "1"],
            Some(json!(5)),
            r#"{
                "items": {
                    "1": 5
                },
                "count": 3
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "items": [1, 2, 3],
                "count": 3
            }"#
            .unindent(),
            &["items", "1"],
            None,
            r#"{
                "items": {
                    "1": null
                },
                "count": 3
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "items": [1, 2, 3],
                "count": 3
            }"#
            .unindent(),
            &["items"],
            Some(json!(["a", "b", "c", "d"])),
            r#"{
                "items": [
                    "a",
                    "b",
                    "c",
                    "d"
                ],
                "count": 3
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "0": "zero",
                "1": "one"
            }"#
            .unindent(),
            &["1"],
            Some(json!("ONE")),
            r#"{
                "0": "zero",
                "1": "ONE"
            }"#
            .unindent(),
        );
        // Test with comments between object members
        check_object_replace(
            r#"{
                "a": 1,
                // Comment between members
                "b": 2,
                /* Block comment */
                "c": 3
            }"#
            .unindent(),
            &["b"],
            Some(json!({"nested": true})),
            r#"{
                "a": 1,
                // Comment between members
                "b": {
                    "nested": true
                },
                /* Block comment */
                "c": 3
            }"#
            .unindent(),
        );

        // Test with trailing comments on replaced value
        check_object_replace(
            r#"{
                "a": 1, // keep this comment
                "b": 2 // this should stay
            }"#
            .unindent(),
            &["a"],
            Some(json!("changed")),
            r#"{
                "a": "changed", // keep this comment
                "b": 2 // this should stay
            }"#
            .unindent(),
        );

        // Test with deep indentation
        check_object_replace(
            r#"{
                "deeply": {
                    "nested": {
                        "value": "old"
                    }
                }
            }"#
            .unindent(),
            &["deeply", "nested", "value"],
            Some(json!("new")),
            r#"{
                "deeply": {
                    "nested": {
                        "value": "new"
                    }
                }
            }"#
            .unindent(),
        );

        // Test removing value with comment preservation
        check_object_replace(
            r#"{
                // Header comment
                "a": 1,
                // This comment belongs to b
                "b": 2,
                // This comment belongs to c
                "c": 3
            }"#
            .unindent(),
            &["b"],
            None,
            r#"{
                // Header comment
                "a": 1,
                // This comment belongs to b
                // This comment belongs to c
                "c": 3
            }"#
            .unindent(),
        );

        // Test with multiline block comments
        check_object_replace(
            r#"{
                /*
                 * This is a multiline
                 * block comment
                 */
                "value": "old",
                /* Another block */ "other": 123
            }"#
            .unindent(),
            &["value"],
            Some(json!("new")),
            r#"{
                /*
                 * This is a multiline
                 * block comment
                 */
                "value": "new",
                /* Another block */ "other": 123
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                // This object is empty
            }"#
            .unindent(),
            &["key"],
            Some(json!("value")),
            r#"{
                // This object is empty
                "key": "value"
            }
            "#
            .unindent(),
        );

        // Test replacing in object with only comments
        check_object_replace(
            r#"{
                // Comment 1
                // Comment 2
            }"#
            .unindent(),
            &["new"],
            Some(json!(42)),
            r#"{
                // Comment 1
                // Comment 2
                "new": 42
            }
            "#
            .unindent(),
        );

        // Test with inconsistent spacing
        check_object_replace(
            r#"{
                "a":1,
                "b" : 2 ,
                "c": 3
            }"#
            .unindent(),
            &["b"],
            Some(json!("spaced")),
            r#"{
                "a":1,
                "b" : "spaced" ,
                "c": 3
            }"#
            .unindent(),
        );
    }

    #[test]
    fn array_replace() {
        #[track_caller]
        fn check_array_replace(
            input: impl ToString,
            index: usize,
            key_path: &[&str],
            value: Option<Value>,
            expected: impl ToString,
        ) {
            let input = input.to_string();
            let result = replace_top_level_array_value_in_json_text(
                &input,
                key_path,
                value.as_ref(),
                None,
                index,
                4,
            )
            .expect("replace succeeded");
            let mut result_str = input;
            result_str.replace_range(result.0, &result.1);
            pretty_assertions::assert_eq!(expected.to_string(), result_str);
        }

        check_array_replace(r#"[1, 3, 3]"#, 1, &[], Some(json!(2)), r#"[1, 2, 3]"#);
        check_array_replace(r#"[1, 3, 3]"#, 2, &[], Some(json!(2)), r#"[1, 3, 2]"#);
        check_array_replace(r#"[1, 3, 3,]"#, 3, &[], Some(json!(2)), r#"[1, 3, 3, 2]"#);
        check_array_replace(r#"[1, 3, 3,]"#, 100, &[], Some(json!(2)), r#"[1, 3, 3, 2]"#);
        check_array_replace(
            r#"[
                1,
                2,
                3,
            ]"#
            .unindent(),
            1,
            &[],
            Some(json!({"foo": "bar", "baz": "qux"})),
            r#"[
                1,
                {
                    "foo": "bar",
                    "baz": "qux"
                },
                3,
            ]"#
            .unindent(),
        );
        check_array_replace(
            r#"[1, 3, 3,]"#,
            1,
            &[],
            Some(json!({"foo": "bar", "baz": "qux"})),
            r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
        );

        check_array_replace(
            r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
            1,
            &["baz"],
            Some(json!({"qux": "quz"})),
            r#"[1, { "foo": "bar", "baz": { "qux": "quz" } }, 3,]"#,
        );

        check_array_replace(
            r#"[
                1,
                {
                    "foo": "bar",
                    "baz": "qux"
                },
                3
            ]"#,
            1,
            &["baz"],
            Some(json!({"qux": "quz"})),
            r#"[
                1,
                {
                    "foo": "bar",
                    "baz": {
                        "qux": "quz"
                    }
                },
                3
            ]"#,
        );

        check_array_replace(
            r#"[
                1,
                {
                    "foo": "bar",
                    "baz": {
                        "qux": "quz"
                    }
                },
                3
            ]"#,
            1,
            &["baz"],
            Some(json!("qux")),
            r#"[
                1,
                {
                    "foo": "bar",
                    "baz": "qux"
                },
                3
            ]"#,
        );

        check_array_replace(
            r#"[
                1,
                {
                    "foo": "bar",
                    // some comment to keep
                    "baz": {
                        // some comment to remove
                        "qux": "quz"
                    }
                    // some other comment to keep
                },
                3
            ]"#,
            1,
            &["baz"],
            Some(json!("qux")),
            r#"[
                1,
                {
                    "foo": "bar",
                    // some comment to keep
                    "baz": "qux"
                    // some other comment to keep
                },
                3
            ]"#,
        );

        // Test with comments between array elements
        check_array_replace(
            r#"[
                1,
                // This is element 2
                2,
                /* Block comment */ 3,
                4 // Trailing comment
            ]"#,
            2,
            &[],
            Some(json!("replaced")),
            r#"[
                1,
                // This is element 2
                2,
                /* Block comment */ "replaced",
                4 // Trailing comment
            ]"#,
        );

        // Test empty array with comments
        check_array_replace(
            r#"[
                // Empty array with comment
            ]"#
            .unindent(),
            0,
            &[],
            Some(json!("first")),
            r#"[
                // Empty array with comment
                "first"
            ]"#
            .unindent(),
        );
        check_array_replace(
            r#"[]"#.unindent(),
            0,
            &[],
            Some(json!("first")),
            r#"[
                "first"
            ]"#
            .unindent(),
        );

        // Test array with leading comments
        check_array_replace(
            r#"[
                // Leading comment
                // Another leading comment
                1,
                2
            ]"#,
            0,
            &[],
            Some(json!({"new": "object"})),
            r#"[
                // Leading comment
                // Another leading comment
                {
                    "new": "object"
                },
                2
            ]"#,
        );

        // Test with deep indentation
        check_array_replace(
            r#"[
                1,
                2,
                3
            ]"#,
            1,
            &[],
            Some(json!("deep")),
            r#"[
                1,
                "deep",
                3
            ]"#,
        );

        // Test with mixed spacing
        check_array_replace(
            r#"[1,2, 3, 4]"#,
            2,
            &[],
            Some(json!("spaced")),
            r#"[1,2, "spaced", 4]"#,
        );

        // Test replacing nested array element
        check_array_replace(
            r#"[
                [1, 2, 3],
                [4, 5, 6],
                [7, 8, 9]
            ]"#,
            1,
            &[],
            Some(json!(["a", "b", "c", "d"])),
            r#"[
                [1, 2, 3],
                [
                    "a",
                    "b",
                    "c",
                    "d"
                ],
                [7, 8, 9]
            ]"#,
        );

        // Test with multiline block comments
        check_array_replace(
            r#"[
                /*
                 * This is a
                 * multiline comment
                 */
                "first",
                "second"
            ]"#,
            0,
            &[],
            Some(json!("updated")),
            r#"[
                /*
                 * This is a
                 * multiline comment
                 */
                "updated",
                "second"
            ]"#,
        );

        // Test replacing with null
        check_array_replace(
            r#"[true, false, true]"#,
            1,
            &[],
            Some(json!(null)),
            r#"[true, null, true]"#,
        );

        // Test single element array
        check_array_replace(
            r#"[42]"#,
            0,
            &[],
            Some(json!({"answer": 42})),
            r#"[{ "answer": 42 }]"#,
        );

        // Test array with only comments
        check_array_replace(
            r#"[
                // Comment 1
                // Comment 2
                // Comment 3
            ]"#
            .unindent(),
            10,
            &[],
            Some(json!(123)),
            r#"[
                // Comment 1
                // Comment 2
                // Comment 3
                123
            ]"#
            .unindent(),
        );

        check_array_replace(
            r#"[
                {
                    "key": "value"
                },
                {
                    "key": "value2"
                }
            ]"#
            .unindent(),
            0,
            &[],
            None,
            r#"[
                {
                    "key": "value2"
                }
            ]"#
            .unindent(),
        );

        check_array_replace(
            r#"[
                {
                    "key": "value"
                },
                {
                    "key": "value2"
                },
                {
                    "key": "value3"
                },
            ]"#
            .unindent(),
            1,
            &[],
            None,
            r#"[
                {
                    "key": "value"
                },
                {
                    "key": "value3"
                },
            ]"#
            .unindent(),
        );
    }

    #[test]
    fn array_append() {
        #[track_caller]
        fn check_array_append(input: impl ToString, value: Value, expected: impl ToString) {
            let input = input.to_string();
            let result = append_top_level_array_value_in_json_text(&input, &value, 4)
                .expect("append succeeded");
            let mut result_str = input;
            result_str.replace_range(result.0, &result.1);
            pretty_assertions::assert_eq!(expected.to_string(), result_str);
        }
        check_array_append(r#"[1, 3, 3]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(r#"[1, 3, 3,]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(r#"[1, 3, 3 ]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(r#"[1, 3, 3, ]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(
            r#"[
                1,
                2,
                3
            ]"#
            .unindent(),
            json!(4),
            r#"[
                1,
                2,
                3,
                4
            ]"#
            .unindent(),
        );
        check_array_append(
            r#"[
                1,
                2,
                3,
            ]"#
            .unindent(),
            json!(4),
            r#"[
                1,
                2,
                3,
                4
            ]"#
            .unindent(),
        );
        check_array_append(
            r#"[
                1,
                2,
                3,
            ]"#
            .unindent(),
            json!({"foo": "bar", "baz": "qux"}),
            r#"[
                1,
                2,
                3,
                {
                    "foo": "bar",
                    "baz": "qux"
                }
            ]"#
            .unindent(),
        );
        check_array_append(
            r#"[ 1, 2, 3, ]"#.unindent(),
            json!({"foo": "bar", "baz": "qux"}),
            r#"[ 1, 2, 3, { "foo": "bar", "baz": "qux" }]"#.unindent(),
        );
        check_array_append(
            r#"[]"#,
            json!({"foo": "bar"}),
            r#"[
                {
                    "foo": "bar"
                }
            ]"#
            .unindent(),
        );

        // Test with comments between array elements
        check_array_append(
            r#"[
                1,
                // Comment between elements
                2,
                /* Block comment */ 3
            ]"#
            .unindent(),
            json!(4),
            r#"[
                1,
                // Comment between elements
                2,
                /* Block comment */ 3,
                4
            ]"#
            .unindent(),
        );

        // Test with trailing comment on last element
        check_array_append(
            r#"[
                1,
                2,
                3 // Trailing comment
            ]"#
            .unindent(),
            json!("new"),
            r#"[
                1,
                2,
                3 // Trailing comment
            ,
                "new"
            ]"#
            .unindent(),
        );

        // Test empty array with comments
        check_array_append(
            r#"[
                // Empty array with comment
            ]"#
            .unindent(),
            json!("first"),
            r#"[
                // Empty array with comment
                "first"
            ]"#
            .unindent(),
        );

        // Test with multiline block comment at end
        check_array_append(
            r#"[
                1,
                2
                /*
                 * This is a
                 * multiline comment
                 */
            ]"#
            .unindent(),
            json!(3),
            r#"[
                1,
                2
                /*
                 * This is a
                 * multiline comment
                 */
            ,
                3
            ]"#
            .unindent(),
        );

        // Test with deep indentation
        check_array_append(
            r#"[
                1,
                2,
                3
            ]"#
            .unindent(),
            json!("deep"),
            r#"[
                1,
                2,
                3,
                "deep"
            ]"#
            .unindent(),
        );

        // Test with no spacing
        check_array_append(r#"[1,2,3]"#, json!(4), r#"[1,2,3, 4]"#);

        // Test appending complex nested structure
        check_array_append(
            r#"[
                {"a": 1},
                {"b": 2}
            ]"#
            .unindent(),
            json!({"c": {"nested": [1, 2, 3]}}),
            r#"[
                {"a": 1},
                {"b": 2},
                {
                    "c": {
                        "nested": [
                            1,
                            2,
                            3
                        ]
                    }
                }
            ]"#
            .unindent(),
        );

        // Test array ending with comment after bracket
        check_array_append(
            r#"[
                1,
                2,
                3
            ] // Comment after array"#
            .unindent(),
            json!(4),
            r#"[
                1,
                2,
                3,
                4
            ] // Comment after array"#
            .unindent(),
        );

        // Test with inconsistent element formatting
        check_array_append(
            r#"[1,
            2,
            3,
            ]"#
            .unindent(),
            json!(4),
            r#"[1,
            2,
            3,
            4
            ]"#
            .unindent(),
        );

        // Test appending to single-line array with trailing comma
        check_array_append(
            r#"[1, 2, 3,]"#,
            json!({"key": "value"}),
            r#"[1, 2, 3, { "key": "value" }]"#,
        );

        // Test appending null value
        check_array_append(r#"[true, false]"#, json!(null), r#"[true, false, null]"#);

        // Test appending to array with only comments
        check_array_append(
            r#"[
                // Just comments here
                // More comments
            ]"#
            .unindent(),
            json!(42),
            r#"[
                // Just comments here
                // More comments
                42
            ]"#
            .unindent(),
        );
    }
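
    // The tests below are illustrative sketches that were not part of the
    // original suite; they assume the formatting behavior exercised above.

    // `update_value_in_json_text` recursively diffs two objects and emits one
    // edit per changed leaf value, applying each edit to `text` as it goes.
    #[test]
    fn update_object_value_sketch() {
        let mut text = "{\n    \"a\": 1,\n    \"b\": 2\n}".to_string();
        let old = json!({ "a": 1, "b": 2 });
        let new = json!({ "a": 1, "b": 3 });
        let mut edits = Vec::new();
        update_value_in_json_text(&mut text, &mut Vec::new(), 4, &old, &new, &[], &mut edits);
        assert_eq!(text, "{\n    \"a\": 1,\n    \"b\": 3\n}");
        assert_eq!(edits.len(), 1);
    }

    // Passing `replace_key` renames the matched key while its value is replaced.
    #[test]
    fn object_replace_key_sketch() {
        let input = r#"{ "old": 1 }"#.to_string();
        let (range, replacement) =
            replace_value_in_json_text(&input, &["old"], 4, Some(&json!(2)), Some("new"));
        let mut result = input;
        result.replace_range(range, &replacement);
        pretty_assertions::assert_eq!(r#"{ "new": 2 }"#, result);
    }

    // `to_pretty_json` indents nested lines by `indent_size` and prefixes every
    // line after the first with `indent_prefix_len` spaces.
    #[test]
    fn to_pretty_json_prefix_sketch() {
        assert_eq!(
            to_pretty_json(&json!({ "a": 1 }), 4, 8),
            "{\n            \"a\": 1\n        }"
        );
    }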
}