use anyhow::Result;
use gpui::App;
use schemars::{JsonSchema, Schema};
use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value;
use std::{ops::Range, sync::LazyLock};
use tree_sitter::{Query, StreamingIterator as _};
use util::RangeExt;

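/// Runtime values that parameterized settings JSON schemas can reference while
/// the settings schema is being generated.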
pub struct SettingsJsonSchemaParams<'a> {
    pub language_names: &'a [String],
    pub font_names: &'a [String],
}

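/// A settings JSON schema that depends on runtime state (for example, the set
/// of known language or font names). Instances are registered with `inventory`
/// and invoked when the settings schema is built.
///
/// A minimal registration sketch, assuming a hypothetical `ExampleFontName`
/// type and schemars' `json_schema!` macro (neither is part of this file):
///
/// ```ignore
/// inventory::submit! {
///     ParameterizedJsonSchema {
///         add_and_get_ref: |generator, params, _cx| {
///             // Override the generated schema for `ExampleFontName` with an
///             // enum of the font names known at runtime.
///             replace_subschema::<ExampleFontName>(
///                 generator,
///                 schemars::json_schema!({
///                     "type": "string",
///                     "enum": params.font_names,
///                 }),
///             )
///         }
///     }
/// }
/// ```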
pub struct ParameterizedJsonSchema {
    pub add_and_get_ref:
        fn(&mut schemars::SchemaGenerator, &SettingsJsonSchemaParams, &App) -> schemars::Schema,
}

inventory::collect!(ParameterizedJsonSchema);

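/// Replaces the subschema that `generator` produced for `T` with `schema`,
/// returning a `$ref` schema that points at the overridden definition. If the
/// `$ref` returned by `subschema_for` has an unexpected shape, this falls back
/// to keying the definition by `T::schema_name()`, which could collide with
/// another type of the same name.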
pub fn replace_subschema<T: JsonSchema>(
    generator: &mut schemars::SchemaGenerator,
    schema: schemars::Schema,
) -> schemars::Schema {
    const DEFINITIONS_PATH: &str = "#/definitions/";
    // The key in definitions may not match T::schema_name() if multiple types have the same name.
    // This is a workaround for there being no straightforward way to get the key used for a type -
    // see https://github.com/GREsau/schemars/issues/449
    let ref_schema = generator.subschema_for::<T>();
    if let Some(serde_json::Value::String(definition_pointer)) = ref_schema.get("$ref") {
        if let Some(definition_name) = definition_pointer.strip_prefix(DEFINITIONS_PATH) {
            generator
                .definitions_mut()
                .insert(definition_name.to_string(), schema.to_value());
            return ref_schema;
        } else {
            log::error!(
                "bug: expected `$ref` field to start with {DEFINITIONS_PATH}, \
                 got {definition_pointer}"
            );
        }
    } else {
        log::error!("bug: expected `$ref` field in result of `subschema_for`");
    }
    // fallback on just using the schema name, which could collide.
    let schema_name = T::schema_name();
    generator
        .definitions_mut()
        .insert(schema_name.to_string(), schema.to_value());
    Schema::new_ref(format!("{DEFINITIONS_PATH}{schema_name}"))
}

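/// Computes the text edits needed to change the JSON object in `text` from
/// `old_value` to `new_value`, touching only the parts that differ so that the
/// comments and formatting of unchanged parts are preserved. Keys listed in
/// `preserved_keys` are rewritten even when their value is unchanged. Each edit
/// is applied to `text` as it is produced and also appended to `edits`.
///
/// A minimal usage sketch:
///
/// ```ignore
/// let mut text = r#"{ "a": 1, "b": 2 }"#.to_string();
/// let mut edits = Vec::new();
/// update_value_in_json_text(
///     &mut text,
///     &mut Vec::new(),
///     4,
///     &serde_json::json!({ "a": 1, "b": 2 }),
///     &serde_json::json!({ "a": 1, "b": 3 }),
///     &[],
///     &mut edits,
/// );
/// // `text` is now r#"{ "a": 1, "b": 3 }"# and `edits` holds the replaced range.
/// ```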
pub fn update_value_in_json_text<'a>(
    text: &mut String,
    key_path: &mut Vec<&'a str>,
    tab_size: usize,
    old_value: &'a Value,
    new_value: &'a Value,
    preserved_keys: &[&str],
    edits: &mut Vec<(Range<usize>, String)>,
) {
    // If the old and new values are both objects, then compare them key by key,
    // preserving the comments and formatting of the unchanged parts. Otherwise,
    // replace the old value with the new value.
    if let (Value::Object(old_object), Value::Object(new_object)) = (old_value, new_value) {
        for (key, old_sub_value) in old_object.iter() {
            key_path.push(key);
            if let Some(new_sub_value) = new_object.get(key) {
                // Key exists in both old and new, recursively update
                update_value_in_json_text(
                    text,
                    key_path,
                    tab_size,
                    old_sub_value,
                    new_sub_value,
                    preserved_keys,
                    edits,
                );
            } else {
                // Key was removed from new object, remove the entire key-value pair
                let (range, replacement) =
                    replace_value_in_json_text(text, key_path, 0, None, None);
                text.replace_range(range.clone(), &replacement);
                edits.push((range, replacement));
            }
            key_path.pop();
        }
        for (key, new_sub_value) in new_object.iter() {
            key_path.push(key);
            if !old_object.contains_key(key) {
                update_value_in_json_text(
                    text,
                    key_path,
                    tab_size,
                    &Value::Null,
                    new_sub_value,
                    preserved_keys,
                    edits,
                );
            }
            key_path.pop();
        }
    } else if key_path
        .last()
        .map_or(false, |key| preserved_keys.contains(key))
        || old_value != new_value
    {
        let mut new_value = new_value.clone();
        if let Some(new_object) = new_value.as_object_mut() {
            new_object.retain(|_, v| !v.is_null());
        }
        let (range, replacement) =
            replace_value_in_json_text(text, key_path, tab_size, Some(&new_value), None);
        text.replace_range(range.clone(), &replacement);
        edits.push((range, replacement));
    }
}

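/// Returns the byte range in `text` to replace, along with its replacement
/// string, in order to set the value at `key_path` to `new_value`.
///
/// * `new_value` - When `None`, the key/value pair at `key_path` (plus an
///   adjacent comma, if any) is removed. When the key path does not exist yet,
///   the missing nested objects are constructed around the new value.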
/// * `replace_key` - When an exact key match according to `key_path` is found, replace the key with `replace_key` if `Some`.
fn replace_value_in_json_text(
    text: &str,
    key_path: &[&str],
    tab_size: usize,
    new_value: Option<&Value>,
    replace_key: Option<&str>,
) -> (Range<usize>, String) {
    static PAIR_QUERY: LazyLock<Query> = LazyLock::new(|| {
        Query::new(
            &tree_sitter_json::LANGUAGE.into(),
            "(pair key: (string) @key value: (_) @value)",
        )
        .expect("Failed to create PAIR_QUERY")
    });

    let mut parser = tree_sitter::Parser::new();
    parser
        .set_language(&tree_sitter_json::LANGUAGE.into())
        .unwrap();
    let syntax_tree = parser.parse(text, None).unwrap();

    let mut cursor = tree_sitter::QueryCursor::new();

    let mut depth = 0;
    let mut last_value_range = 0..0;
    let mut first_key_start = None;
    let mut existing_value_range = 0..text.len();

    let mut matches = cursor.matches(&PAIR_QUERY, syntax_tree.root_node(), text.as_bytes());
    while let Some(mat) = matches.next() {
        if mat.captures.len() != 2 {
            continue;
        }

        let key_range = mat.captures[0].node.byte_range();
        let value_range = mat.captures[1].node.byte_range();

        // Don't enter sub objects until we find an exact
        // match for the current keypath
        if last_value_range.contains_inclusive(&value_range) {
            continue;
        }

        last_value_range = value_range.clone();

        if key_range.start > existing_value_range.end {
            break;
        }

        first_key_start.get_or_insert(key_range.start);

        let found_key = text
            .get(key_range.clone())
            .map(|key_text| {
                depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth])
            })
            .unwrap_or(false);

        if found_key {
            existing_value_range = value_range;
            // Reset last value range when increasing in depth
            last_value_range = existing_value_range.start..existing_value_range.start;
            depth += 1;

            if depth == key_path.len() {
                break;
            }

            first_key_start = None;
        }
    }

    // We found the exact key we want
    if depth == key_path.len() {
        if let Some(new_value) = new_value {
            let new_val = to_pretty_json(new_value, tab_size, tab_size * depth);
            if let Some(replace_key) = replace_key {
                let new_key = format!("\"{}\": ", replace_key);
                if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
                    if let Some(prev_key_start) = text[..key_start].rfind('"') {
                        existing_value_range.start = prev_key_start;
                    } else {
                        existing_value_range.start = key_start;
                    }
                }
                (existing_value_range, new_key + &new_val)
            } else {
                (existing_value_range, new_val)
            }
        } else {
            let mut removal_start = first_key_start.unwrap_or(existing_value_range.start);
            let mut removal_end = existing_value_range.end;

            // Find the actual key position by looking for the key in the pair
            // We need to extend the range to include the key, not just the value
            if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
                if let Some(prev_key_start) = text[..key_start].rfind('"') {
                    removal_start = prev_key_start;
                } else {
                    removal_start = key_start;
                }
            }

            // Look backward for a preceding comma first
            let preceding_text = text.get(0..removal_start).unwrap_or("");
            if let Some(comma_pos) = preceding_text.rfind(',') {
                // Check if there are only whitespace characters between the comma and our key
                let between_comma_and_key = text.get(comma_pos + 1..removal_start).unwrap_or("");
                if between_comma_and_key.trim().is_empty() {
                    removal_start = comma_pos;
                }
            }

            if let Some(remaining_text) = text.get(existing_value_range.end..) {
                let mut chars = remaining_text.char_indices();
                while let Some((offset, ch)) = chars.next() {
                    if ch == ',' {
                        removal_end = existing_value_range.end + offset + 1;
                        // Also consume whitespace after the comma
                        while let Some((_, next_ch)) = chars.next() {
                            if next_ch.is_whitespace() {
                                removal_end += next_ch.len_utf8();
                            } else {
                                break;
                            }
                        }
                        break;
                    } else if !ch.is_whitespace() {
                        break;
                    }
                }
            }
            (removal_start..removal_end, String::new())
        }
    } else {
        // We didn't find the full key path, so construct the missing nested objects.
        let new_key = key_path[depth];

        // Wrap the new value in one object per remaining key, innermost first.
        let mut new_value =
            serde_json::to_value(new_value.unwrap_or(&serde_json::Value::Null)).unwrap();
        for key in key_path[(depth + 1)..].iter().rev() {
            new_value = serde_json::json!({ key.to_string(): new_value });
        }

        if let Some(first_key_start) = first_key_start {
            let mut row = 0;
            let mut column = 0;
            for (ix, char) in text.char_indices() {
                if ix == first_key_start {
                    break;
                }
                if char == '\n' {
                    row += 1;
                    column = 0;
                } else {
                    column += char.len_utf8();
                }
            }

            if row > 0 {
                // depth is 0 based, but division needs to be 1 based.
                let new_val = to_pretty_json(&new_value, column / (depth + 1), column);
                let space = ' ';
                let content = format!("\"{new_key}\": {new_val},\n{space:width$}", width = column);
                (first_key_start..first_key_start, content)
            } else {
                let new_val = serde_json::to_string(&new_value).unwrap();
                let mut content = format!(r#""{new_key}": {new_val},"#);
                content.push(' ');
                (first_key_start..first_key_start, content)
            }
        } else {
            new_value = serde_json::json!({ new_key.to_string(): new_value });
            let indent_prefix_len = 4 * depth;
            let mut new_val = to_pretty_json(&new_value, 4, indent_prefix_len);
            if depth == 0 {
                new_val.push('\n');
            }
            // Best effort to keep any comments from the replaced text, with best-effort indentation.
            let mut replace_text = &text[existing_value_range.clone()];
            while let Some(comment_start) = replace_text.rfind("//") {
                if let Some(comment_end) = replace_text[comment_start..].find('\n') {
                    let mut comment_with_indent_start = replace_text[..comment_start]
                        .rfind('\n')
                        .unwrap_or(comment_start);
                    if !replace_text[comment_with_indent_start..comment_start]
                        .trim()
                        .is_empty()
                    {
                        comment_with_indent_start = comment_start;
                    }
                    new_val.insert_str(
                        1,
                        &replace_text[comment_with_indent_start..comment_start + comment_end],
                    );
                }
                replace_text = &replace_text[..comment_start];
            }

            (existing_value_range, new_val)
        }
    }
}

const TS_DOCUMENT_KIND: &'static str = "document";
const TS_ARRAY_KIND: &'static str = "array";
const TS_COMMENT_KIND: &'static str = "comment";

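/// Replaces (or removes, when `new_value` is `None`) the value at `key_path`
/// inside the `array_index`-th element of a JSON document whose top level is an
/// array. If `array_index` is past the end of the array, `new_value` is
/// appended instead (or no edit is produced when removing).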
pub fn replace_top_level_array_value_in_json_text(
    text: &str,
    key_path: &[&str],
    new_value: Option<&Value>,
    replace_key: Option<&str>,
    array_index: usize,
    tab_size: usize,
) -> Result<(Range<usize>, String)> {
    let mut parser = tree_sitter::Parser::new();
    parser
        .set_language(&tree_sitter_json::LANGUAGE.into())
        .unwrap();
    let syntax_tree = parser.parse(text, None).unwrap();

    let mut cursor = syntax_tree.walk();

    if cursor.node().kind() == TS_DOCUMENT_KIND {
        anyhow::ensure!(
            cursor.goto_first_child(),
            "Document empty - No top level array"
        );
    }

    while cursor.node().kind() != TS_ARRAY_KIND {
        anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
    }

    // `goto_first_child` returns false only if the array node has no children;
    // even an empty array has `[` and `]` tokens, so the result is not checked here.
    cursor.goto_first_child();
    debug_assert_eq!(cursor.node().kind(), "[");

    let mut index = 0;

    while index <= array_index {
        let node = cursor.node();
        if !matches!(node.kind(), "[" | "]" | TS_COMMENT_KIND | ",")
            && !node.is_extra()
            && !node.is_missing()
        {
            if index == array_index {
                break;
            }
            index += 1;
        }
        if !cursor.goto_next_sibling() {
            if let Some(new_value) = new_value {
                return append_top_level_array_value_in_json_text(text, new_value, tab_size);
            } else {
                return Ok((0..0, String::new()));
            }
        }
    }

    let range = cursor.node().range();
    let indent_width = range.start_point.column;
    let offset = range.start_byte;
    let value_str = &text[range.start_byte..range.end_byte];
    let needs_indent = range.start_point.row > 0;

    let (mut replace_range, mut replace_value) =
        replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);

    replace_range.start += offset;
    replace_range.end += offset;

    if needs_indent {
        let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
        replace_value = replace_value.replace('\n', &increased_indent);
        // replace_value.push('\n');
    } else {
        while let Some(idx) = replace_value.find("\n ") {
            replace_value.remove(idx + 1);
        }
        while let Some(idx) = replace_value.find("\n") {
            replace_value.replace_range(idx..idx + 1, " ");
        }
    }

    return Ok((replace_range, replace_value));
}

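/// Appends `new_value` to the end of a JSON document whose top level is an
/// array, inserting a comma if needed and matching the indentation style of the
/// existing elements.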
pub fn append_top_level_array_value_in_json_text(
    text: &str,
    new_value: &Value,
    tab_size: usize,
) -> Result<(Range<usize>, String)> {
    let mut parser = tree_sitter::Parser::new();
    parser
        .set_language(&tree_sitter_json::LANGUAGE.into())
        .unwrap();
    let syntax_tree = parser.parse(text, None).unwrap();

    let mut cursor = syntax_tree.walk();

    if cursor.node().kind() == TS_DOCUMENT_KIND {
        anyhow::ensure!(
            cursor.goto_first_child(),
            "Document empty - No top level array"
        );
    }

    while cursor.node().kind() != TS_ARRAY_KIND {
        anyhow::ensure!(cursor.goto_next_sibling(), "EOF - No top level array");
    }

    anyhow::ensure!(
        cursor.goto_last_child(),
        "Malformed JSON syntax tree, expected `]` at end of array"
    );
    debug_assert_eq!(cursor.node().kind(), "]");
    let close_bracket_start = cursor.node().start_byte();
    cursor.goto_previous_sibling();
    while (cursor.node().is_extra() || cursor.node().is_missing()) && cursor.goto_previous_sibling()
    {
    }

    let mut comma_range = None;
    let mut prev_item_range = None;

    if cursor.node().kind() == "," {
        comma_range = Some(cursor.node().byte_range());
        while cursor.goto_previous_sibling() && cursor.node().is_extra() {}

        debug_assert_ne!(cursor.node().kind(), "[");
        prev_item_range = Some(cursor.node().range());
    } else {
        while (cursor.node().is_extra() || cursor.node().is_missing())
            && cursor.goto_previous_sibling()
        {}
        if cursor.node().kind() != "[" {
            prev_item_range = Some(cursor.node().range());
        }
    }

    let (mut replace_range, mut replace_value) =
        replace_value_in_json_text("", &[], tab_size, Some(new_value), None);

    replace_range.start = close_bracket_start;
    replace_range.end = close_bracket_start;

    let space = ' ';
    if let Some(prev_item_range) = prev_item_range {
        let needs_newline = prev_item_range.start_point.row > 0;
        let indent_width = text[..prev_item_range.start_byte].rfind('\n').map_or(
            prev_item_range.start_point.column,
            |idx| {
                prev_item_range.start_point.column
                    - text[idx + 1..prev_item_range.start_byte].trim_start().len()
            },
        );

        let prev_item_end = comma_range
            .as_ref()
            .map_or(prev_item_range.end_byte, |range| range.end);
        if text[prev_item_end..replace_range.start].trim().is_empty() {
            replace_range.start = prev_item_end;
        }

        if needs_newline {
            let increased_indent = format!("\n{space:width$}", width = indent_width);
            replace_value = replace_value.replace('\n', &increased_indent);
            replace_value.push('\n');
            replace_value.insert_str(0, &format!("\n{space:width$}", width = indent_width));
        } else {
            while let Some(idx) = replace_value.find("\n ") {
                replace_value.remove(idx + 1);
            }
            while let Some(idx) = replace_value.find('\n') {
                replace_value.replace_range(idx..idx + 1, " ");
            }
            replace_value.insert(0, ' ');
        }

        if comma_range.is_none() {
            replace_value.insert(0, ',');
        }
    } else {
        if let Some(prev_newline) = text[..replace_range.start].rfind('\n') {
            if text[prev_newline..replace_range.start].trim().is_empty() {
                replace_range.start = prev_newline;
            }
        }
        let indent = format!("\n{space:width$}", width = tab_size);
        replace_value = replace_value.replace('\n', &indent);
        replace_value.insert_str(0, &indent);
        replace_value.push('\n');
    }
    return Ok((replace_range, replace_value));
}

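/// Serializes `value` as pretty-printed JSON with `indent_size` spaces per
/// indentation level, prefixing every line after the first with
/// `indent_prefix_len` spaces so that the result can be spliced into text that
/// is already indented. For example, `to_pretty_json(&json!({"a": 1}), 2, 4)`
/// yields `"{\n      \"a\": 1\n    }"`.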
pub fn to_pretty_json(
    value: &impl Serialize,
    indent_size: usize,
    indent_prefix_len: usize,
) -> String {
    const SPACES: [u8; 32] = [b' '; 32];

    debug_assert!(indent_size <= SPACES.len());
    debug_assert!(indent_prefix_len <= SPACES.len());

    let mut output = Vec::new();
    let mut ser = serde_json::Serializer::with_formatter(
        &mut output,
        serde_json::ser::PrettyFormatter::with_indent(&SPACES[0..indent_size.min(SPACES.len())]),
    );

    value.serialize(&mut ser).unwrap();
    let text = String::from_utf8(output).unwrap();

    let mut adjusted_text = String::new();
    for (i, line) in text.split('\n').enumerate() {
        if i > 0 {
            adjusted_text.push_str(str::from_utf8(&SPACES[0..indent_prefix_len]).unwrap());
        }
        adjusted_text.push_str(line);
        adjusted_text.push('\n');
    }
    adjusted_text.pop();
    adjusted_text
}

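/// Parses JSON that may contain comments and trailing commas (as settings and
/// keymap files do), using `serde_json_lenient`.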
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
    Ok(serde_json_lenient::from_str(content)?)
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::{Value, json};
    use unindent::Unindent;

    #[test]
    fn object_replace() {
        #[track_caller]
        fn check_object_replace(
            input: String,
            key_path: &[&str],
            value: Option<Value>,
            expected: String,
        ) {
            let result = replace_value_in_json_text(&input, key_path, 4, value.as_ref(), None);
            let mut result_str = input.to_string();
            result_str.replace_range(result.0, &result.1);
            pretty_assertions::assert_eq!(expected, result_str);
        }
        check_object_replace(
            r#"{
                "a": 1,
                "b": 2
            }"#
            .unindent(),
            &["b"],
            Some(json!(3)),
            r#"{
                "a": 1,
                "b": 3
            }"#
            .unindent(),
        );
        check_object_replace(
            r#"{
                "a": 1,
                "b": 2
            }"#
            .unindent(),
            &["b"],
            None,
            r#"{
                "a": 1
            }"#
            .unindent(),
        );
        check_object_replace(
            r#"{
                "a": 1,
                "b": 2
            }"#
            .unindent(),
            &["c"],
            Some(json!(3)),
            r#"{
                "c": 3,
                "a": 1,
                "b": 2
            }"#
            .unindent(),
        );
        check_object_replace(
            r#"{
                "a": 1,
                "b": {
                    "c": 2,
                    "d": 3,
                }
            }"#
            .unindent(),
            &["b", "c"],
            Some(json!([1, 2, 3])),
            r#"{
                "a": 1,
                "b": {
                    "c": [
                        1,
                        2,
                        3
                    ],
                    "d": 3,
                }
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "name": "old_name",
                "id": 123
            }"#
            .unindent(),
            &["name"],
            Some(json!("new_name")),
            r#"{
                "name": "new_name",
                "id": 123
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "enabled": false,
                "count": 5
            }"#
            .unindent(),
            &["enabled"],
            Some(json!(true)),
            r#"{
                "enabled": true,
                "count": 5
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "value": null,
                "other": "test"
            }"#
            .unindent(),
            &["value"],
            Some(json!(42)),
            r#"{
                "value": 42,
                "other": "test"
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "config": {
                    "old": true
                },
                "name": "test"
            }"#
            .unindent(),
            &["config"],
            Some(json!({"new": false, "count": 3})),
            r#"{
                "config": {
                    "new": false,
                    "count": 3
                },
                "name": "test"
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                // This is a comment
                "a": 1,
                "b": 2 // Another comment
            }"#
            .unindent(),
            &["b"],
            Some(json!({"foo": "bar"})),
            r#"{
                // This is a comment
                "a": 1,
                "b": {
                    "foo": "bar"
                } // Another comment
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{}"#.to_string(),
            &["new_key"],
            Some(json!("value")),
            r#"{
                "new_key": "value"
            }
            "#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "only_key": 123
            }"#
            .unindent(),
            &["only_key"],
            None,
            "{\n    \n}".to_string(),
        );

        check_object_replace(
            r#"{
                "level1": {
                    "level2": {
                        "level3": {
                            "target": "old"
                        }
                    }
                }
            }"#
            .unindent(),
            &["level1", "level2", "level3", "target"],
            Some(json!("new")),
            r#"{
                "level1": {
                    "level2": {
                        "level3": {
                            "target": "new"
                        }
                    }
                }
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "parent": {}
            }"#
            .unindent(),
            &["parent", "child"],
            Some(json!("value")),
            r#"{
                "parent": {
                    "child": "value"
                }
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "a": 1,
                "b": 2,
            }"#
            .unindent(),
            &["b"],
            Some(json!(3)),
            r#"{
                "a": 1,
                "b": 3,
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "items": [1, 2, 3],
                "count": 3
            }"#
            .unindent(),
            &["items", "1"],
            Some(json!(5)),
            r#"{
                "items": {
                    "1": 5
                },
                "count": 3
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "items": [1, 2, 3],
                "count": 3
            }"#
            .unindent(),
            &["items", "1"],
            None,
            r#"{
                "items": {
                    "1": null
                },
                "count": 3
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "items": [1, 2, 3],
                "count": 3
            }"#
            .unindent(),
            &["items"],
            Some(json!(["a", "b", "c", "d"])),
            r#"{
                "items": [
                    "a",
                    "b",
                    "c",
                    "d"
                ],
                "count": 3
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                "0": "zero",
                "1": "one"
            }"#
            .unindent(),
            &["1"],
            Some(json!("ONE")),
            r#"{
                "0": "zero",
                "1": "ONE"
            }"#
            .unindent(),
        );
        // Test with comments between object members
        check_object_replace(
            r#"{
                "a": 1,
                // Comment between members
                "b": 2,
                /* Block comment */
                "c": 3
            }"#
            .unindent(),
            &["b"],
            Some(json!({"nested": true})),
            r#"{
                "a": 1,
                // Comment between members
                "b": {
                    "nested": true
                },
                /* Block comment */
                "c": 3
            }"#
            .unindent(),
        );

        // Test with trailing comments on replaced value
        check_object_replace(
            r#"{
                "a": 1, // keep this comment
                "b": 2 // this should stay
            }"#
            .unindent(),
            &["a"],
            Some(json!("changed")),
            r#"{
                "a": "changed", // keep this comment
                "b": 2 // this should stay
            }"#
            .unindent(),
        );

        // Test with deep indentation
        check_object_replace(
            r#"{
                "deeply": {
                    "nested": {
                        "value": "old"
                    }
                }
            }"#
            .unindent(),
            &["deeply", "nested", "value"],
            Some(json!("new")),
            r#"{
                "deeply": {
                    "nested": {
                        "value": "new"
                    }
                }
            }"#
            .unindent(),
        );

        // Test removing value with comment preservation
        check_object_replace(
            r#"{
                // Header comment
                "a": 1,
                // This comment belongs to b
                "b": 2,
                // This comment belongs to c
                "c": 3
            }"#
            .unindent(),
            &["b"],
            None,
            r#"{
                // Header comment
                "a": 1,
                // This comment belongs to b
                // This comment belongs to c
                "c": 3
            }"#
            .unindent(),
        );

        // Test with multiline block comments
        check_object_replace(
            r#"{
                /*
                 * This is a multiline
                 * block comment
                 */
                "value": "old",
                /* Another block */ "other": 123
            }"#
            .unindent(),
            &["value"],
            Some(json!("new")),
            r#"{
                /*
                 * This is a multiline
                 * block comment
                 */
                "value": "new",
                /* Another block */ "other": 123
            }"#
            .unindent(),
        );

        check_object_replace(
            r#"{
                // This object is empty
            }"#
            .unindent(),
            &["key"],
            Some(json!("value")),
            r#"{
                // This object is empty
                "key": "value"
            }
            "#
            .unindent(),
        );

        // Test replacing in object with only comments
        check_object_replace(
            r#"{
                // Comment 1
                // Comment 2
            }"#
            .unindent(),
            &["new"],
            Some(json!(42)),
            r#"{
                // Comment 1
                // Comment 2
                "new": 42
            }
            "#
            .unindent(),
        );

        // Test with inconsistent spacing
        check_object_replace(
            r#"{
                "a":1,
                "b" : 2 ,
                "c": 3
            }"#
            .unindent(),
            &["b"],
            Some(json!("spaced")),
            r#"{
                "a":1,
                "b" : "spaced" ,
                "c": 3
            }"#
            .unindent(),
        );
    }

    #[test]
    fn array_replace() {
        #[track_caller]
        fn check_array_replace(
            input: impl ToString,
            index: usize,
            key_path: &[&str],
            value: Value,
            expected: impl ToString,
        ) {
            let input = input.to_string();
            let result = replace_top_level_array_value_in_json_text(
                &input,
                key_path,
                Some(&value),
                None,
                index,
                4,
            )
            .expect("replace succeeded");
            let mut result_str = input;
            result_str.replace_range(result.0, &result.1);
            pretty_assertions::assert_eq!(expected.to_string(), result_str);
        }

        check_array_replace(r#"[1, 3, 3]"#, 1, &[], json!(2), r#"[1, 2, 3]"#);
        check_array_replace(r#"[1, 3, 3]"#, 2, &[], json!(2), r#"[1, 3, 2]"#);
        check_array_replace(r#"[1, 3, 3,]"#, 3, &[], json!(2), r#"[1, 3, 3, 2]"#);
        check_array_replace(r#"[1, 3, 3,]"#, 100, &[], json!(2), r#"[1, 3, 3, 2]"#);
        check_array_replace(
            r#"[
                1,
                2,
                3,
            ]"#
            .unindent(),
            1,
            &[],
            json!({"foo": "bar", "baz": "qux"}),
            r#"[
                1,
                {
                    "foo": "bar",
                    "baz": "qux"
                },
                3,
            ]"#
            .unindent(),
        );
        check_array_replace(
            r#"[1, 3, 3,]"#,
            1,
            &[],
            json!({"foo": "bar", "baz": "qux"}),
            r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
        );

        check_array_replace(
            r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
            1,
            &["baz"],
            json!({"qux": "quz"}),
            r#"[1, { "foo": "bar", "baz": { "qux": "quz" } }, 3,]"#,
        );

        check_array_replace(
            r#"[
    1,
    {
        "foo": "bar",
        "baz": "qux"
    },
    3
]"#,
            1,
            &["baz"],
            json!({"qux": "quz"}),
            r#"[
    1,
    {
        "foo": "bar",
        "baz": {
            "qux": "quz"
        }
    },
    3
]"#,
        );

        check_array_replace(
            r#"[
    1,
    {
        "foo": "bar",
        "baz": {
            "qux": "quz"
        }
    },
    3
]"#,
            1,
            &["baz"],
            json!("qux"),
            r#"[
    1,
    {
        "foo": "bar",
        "baz": "qux"
    },
    3
]"#,
        );

        check_array_replace(
            r#"[
    1,
    {
        "foo": "bar",
        // some comment to keep
        "baz": {
            // some comment to remove
            "qux": "quz"
        }
        // some other comment to keep
    },
    3
]"#,
            1,
            &["baz"],
            json!("qux"),
            r#"[
    1,
    {
        "foo": "bar",
        // some comment to keep
        "baz": "qux"
        // some other comment to keep
    },
    3
]"#,
        );

        // Test with comments between array elements
        check_array_replace(
            r#"[
    1,
    // This is element 2
    2,
    /* Block comment */ 3,
    4 // Trailing comment
]"#,
            2,
            &[],
            json!("replaced"),
            r#"[
    1,
    // This is element 2
    2,
    /* Block comment */ "replaced",
    4 // Trailing comment
]"#,
        );

        // Test empty array with comments
        check_array_replace(
            r#"[
                // Empty array with comment
            ]"#
            .unindent(),
            0,
            &[],
            json!("first"),
            r#"[
                // Empty array with comment
                "first"
            ]"#
            .unindent(),
        );
        check_array_replace(
            r#"[]"#.unindent(),
            0,
            &[],
            json!("first"),
            r#"[
                "first"
            ]"#
            .unindent(),
        );

        // Test array with leading comments
        check_array_replace(
            r#"[
    // Leading comment
    // Another leading comment
    1,
    2
]"#,
            0,
            &[],
            json!({"new": "object"}),
            r#"[
    // Leading comment
    // Another leading comment
    {
        "new": "object"
    },
    2
]"#,
        );

        // Test with deep indentation
        check_array_replace(
            r#"[
        1,
        2,
        3
]"#,
            1,
            &[],
            json!("deep"),
            r#"[
        1,
        "deep",
        3
]"#,
        );

        // Test with mixed spacing
        check_array_replace(
            r#"[1,2, 3, 4]"#,
            2,
            &[],
            json!("spaced"),
            r#"[1,2, "spaced", 4]"#,
        );

        // Test replacing nested array element
        check_array_replace(
            r#"[
    [1, 2, 3],
    [4, 5, 6],
    [7, 8, 9]
]"#,
            1,
            &[],
            json!(["a", "b", "c", "d"]),
            r#"[
    [1, 2, 3],
    [
        "a",
        "b",
        "c",
        "d"
    ],
    [7, 8, 9]
]"#,
        );

        // Test with multiline block comments
        check_array_replace(
            r#"[
    /*
     * This is a
     * multiline comment
     */
    "first",
    "second"
]"#,
            0,
            &[],
            json!("updated"),
            r#"[
    /*
     * This is a
     * multiline comment
     */
    "updated",
    "second"
]"#,
        );

        // Test replacing with null
        check_array_replace(
            r#"[true, false, true]"#,
            1,
            &[],
            json!(null),
            r#"[true, null, true]"#,
        );

        // Test single element array
        check_array_replace(
            r#"[42]"#,
            0,
            &[],
            json!({"answer": 42}),
            r#"[{ "answer": 42 }]"#,
        );

        // Test array with only comments
        check_array_replace(
            r#"[
                // Comment 1
                // Comment 2
                // Comment 3
            ]"#
            .unindent(),
            10,
            &[],
            json!(123),
            r#"[
                // Comment 1
                // Comment 2
                // Comment 3
                123
            ]"#
            .unindent(),
        );
    }

    #[test]
    fn array_append() {
        #[track_caller]
        fn check_array_append(input: impl ToString, value: Value, expected: impl ToString) {
            let input = input.to_string();
            let result = append_top_level_array_value_in_json_text(&input, &value, 4)
                .expect("append succeeded");
            let mut result_str = input;
            result_str.replace_range(result.0, &result.1);
            pretty_assertions::assert_eq!(expected.to_string(), result_str);
        }
        check_array_append(r#"[1, 3, 3]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(r#"[1, 3, 3,]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(r#"[1, 3, 3 ]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(r#"[1, 3, 3, ]"#, json!(4), r#"[1, 3, 3, 4]"#);
        check_array_append(
            r#"[
                1,
                2,
                3
            ]"#
            .unindent(),
            json!(4),
            r#"[
                1,
                2,
                3,
                4
            ]"#
            .unindent(),
        );
        check_array_append(
            r#"[
                1,
                2,
                3,
            ]"#
            .unindent(),
            json!(4),
            r#"[
                1,
                2,
                3,
                4
            ]"#
            .unindent(),
        );
        check_array_append(
            r#"[
                1,
                2,
                3,
            ]"#
            .unindent(),
            json!({"foo": "bar", "baz": "qux"}),
            r#"[
                1,
                2,
                3,
                {
                    "foo": "bar",
                    "baz": "qux"
                }
            ]"#
            .unindent(),
        );
        check_array_append(
            r#"[ 1, 2, 3, ]"#.unindent(),
            json!({"foo": "bar", "baz": "qux"}),
            r#"[ 1, 2, 3, { "foo": "bar", "baz": "qux" }]"#.unindent(),
        );
        check_array_append(
            r#"[]"#,
            json!({"foo": "bar"}),
            r#"[
                {
                    "foo": "bar"
                }
            ]"#
            .unindent(),
        );

        // Test with comments between array elements
        check_array_append(
            r#"[
                1,
                // Comment between elements
                2,
                /* Block comment */ 3
            ]"#
            .unindent(),
            json!(4),
            r#"[
                1,
                // Comment between elements
                2,
                /* Block comment */ 3,
                4
            ]"#
            .unindent(),
        );

        // Test with trailing comment on last element
        check_array_append(
            r#"[
                1,
                2,
                3 // Trailing comment
            ]"#
            .unindent(),
            json!("new"),
            r#"[
                1,
                2,
                3 // Trailing comment
            ,
                "new"
            ]"#
            .unindent(),
        );

        // Test empty array with comments
        check_array_append(
            r#"[
                // Empty array with comment
            ]"#
            .unindent(),
            json!("first"),
            r#"[
                // Empty array with comment
                "first"
            ]"#
            .unindent(),
        );

        // Test with multiline block comment at end
        check_array_append(
            r#"[
                1,
                2
                /*
                 * This is a
                 * multiline comment
                 */
            ]"#
            .unindent(),
            json!(3),
            r#"[
                1,
                2
                /*
                 * This is a
                 * multiline comment
                 */
            ,
                3
            ]"#
            .unindent(),
        );

        // Test with deep indentation
        check_array_append(
            r#"[
                    1,
                    2,
                    3
            ]"#
            .unindent(),
            json!("deep"),
            r#"[
                    1,
                    2,
                    3,
                    "deep"
            ]"#
            .unindent(),
        );

        // Test with no spacing
        check_array_append(r#"[1,2,3]"#, json!(4), r#"[1,2,3, 4]"#);

        // Test appending complex nested structure
        check_array_append(
            r#"[
                {"a": 1},
                {"b": 2}
            ]"#
            .unindent(),
            json!({"c": {"nested": [1, 2, 3]}}),
            r#"[
                {"a": 1},
                {"b": 2},
                {
                    "c": {
                        "nested": [
                            1,
                            2,
                            3
                        ]
                    }
                }
            ]"#
            .unindent(),
        );

        // Test array ending with comment after bracket
        check_array_append(
            r#"[
                1,
                2,
                3
            ] // Comment after array"#
            .unindent(),
            json!(4),
            r#"[
                1,
                2,
                3,
                4
            ] // Comment after array"#
            .unindent(),
        );

        // Test with inconsistent element formatting
        check_array_append(
            r#"[1,
                2,
                3,
            ]"#
            .unindent(),
            json!(4),
            r#"[1,
                2,
                3,
                4
            ]"#
            .unindent(),
        );

        // Test appending to single-line array with trailing comma
        check_array_append(
            r#"[1, 2, 3,]"#,
            json!({"key": "value"}),
            r#"[1, 2, 3, { "key": "value" }]"#,
        );

        // Test appending null value
        check_array_append(r#"[true, false]"#, json!(null), r#"[true, false, null]"#);

        // Test appending to array with only comments
        check_array_append(
            r#"[
                // Just comments here
                // More comments
            ]"#
            .unindent(),
            json!(42),
            r#"[
                // Just comments here
                // More comments
                42
            ]"#
            .unindent(),
        );
    }
}