Skip to content

Commit bcd2401

Browse files
committed
fix(table): flatten nested objects to dot-notation, safe multi-byte truncation (fixes npm#40 npm#43)
1 parent 4b868c7 commit bcd2401

2 files changed

Lines changed: 129 additions & 27 deletions

File tree

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
---
"gws": patch
---

fix: flatten nested objects in table output and fix multi-byte char truncation panic

src/formatter.rs

Lines changed: 124 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,30 @@ fn format_table(value: &Value) -> String {
9595
format_table_page(value, true)
9696
}
9797

98+
/// Recursively flatten a JSON object into `(dot.notation.key, string_value)` pairs.
99+
///
100+
/// Nested objects become `parent.child` key names so that `--format table` can
101+
/// render them as individual columns instead of raw JSON blobs.
102+
fn flatten_object(obj: &serde_json::Map<String, Value>, prefix: &str) -> Vec<(String, String)> {
103+
let mut out = Vec::new();
104+
for (key, val) in obj {
105+
let full_key = if prefix.is_empty() {
106+
key.clone()
107+
} else {
108+
format!("{prefix}.{key}")
109+
};
110+
match val {
111+
Value::Object(nested) => {
112+
out.extend(flatten_object(nested, &full_key));
113+
}
114+
_ => {
115+
out.push((full_key, value_to_cell(val)));
116+
}
117+
}
118+
}
119+
out
120+
}
121+
98122
/// Format as a text table, optionally omitting the header row.
99123
///
100124
/// Pass `emit_header = false` for continuation pages when using `--page-all`
@@ -109,11 +133,11 @@ fn format_table_page(value: &Value, emit_header: bool) -> String {
109133
} else if let Value::Array(arr) = value {
110134
format_array_as_table(arr, emit_header)
111135
} else if let Value::Object(obj) = value {
112-
// Single object: key/value table
136+
// Single object: key/value table — flatten nested objects first
113137
let mut output = String::new();
114-
let max_key_len = obj.keys().map(|k| k.len()).max().unwrap_or(0);
115-
for (key, val) in obj {
116-
let val_str = value_to_cell(val);
138+
let flat = flatten_object(obj, "");
139+
let max_key_len = flat.iter().map(|(k, _)| k.len()).max().unwrap_or(0);
140+
for (key, val_str) in &flat {
117141
let _ = writeln!(output, "{:width$} {}", key, val_str, width = max_key_len);
118142
}
119143
output
@@ -127,14 +151,21 @@ fn format_array_as_table(arr: &[Value], emit_header: bool) -> String {
127151
return "(empty)\n".to_string();
128152
}
129153

130-
// Collect all unique keys across all objects
154+
// Flatten each row so nested objects become dot-notation columns.
155+
let flat_rows: Vec<Vec<(String, String)>> = arr
156+
.iter()
157+
.map(|item| match item {
158+
Value::Object(obj) => flatten_object(obj, ""),
159+
_ => vec![(String::new(), value_to_cell(item))],
160+
})
161+
.collect();
162+
163+
// Collect all unique column names (preserving insertion order).
131164
let mut columns: Vec<String> = Vec::new();
132-
for item in arr {
133-
if let Value::Object(obj) = item {
134-
for key in obj.keys() {
135-
if !columns.contains(key) {
136-
columns.push(key.clone());
137-
}
165+
for row in &flat_rows {
166+
for (key, _) in row {
167+
if !columns.contains(key) {
168+
columns.push(key.clone());
138169
}
139170
}
140171
}
@@ -148,24 +179,32 @@ fn format_array_as_table(arr: &[Value], emit_header: bool) -> String {
148179
return output;
149180
}
150181

151-
// Calculate column widths
152-
let mut widths: Vec<usize> = columns.iter().map(|c| c.len()).collect();
153-
let rows: Vec<Vec<String>> = arr
182+
// Build lookup: row_index -> column_name -> cell_value
183+
let row_maps: Vec<std::collections::HashMap<&str, &str>> = flat_rows
184+
.iter()
185+
.map(|pairs| {
186+
pairs
187+
.iter()
188+
.map(|(k, v)| (k.as_str(), v.as_str()))
189+
.collect()
190+
})
191+
.collect();
192+
193+
// Calculate column widths (char-count, not byte-count).
194+
let mut widths: Vec<usize> = columns.iter().map(|c| c.chars().count()).collect();
195+
let rows: Vec<Vec<String>> = row_maps
154196
.iter()
155-
.map(|item| {
197+
.map(|row| {
156198
columns
157199
.iter()
158200
.enumerate()
159201
.map(|(i, col)| {
160-
let cell = if let Value::Object(obj) = item {
161-
obj.get(col).map(value_to_cell).unwrap_or_default()
162-
} else {
163-
String::new()
164-
};
165-
if cell.len() > widths[i] {
166-
widths[i] = cell.len();
202+
let cell = row.get(col.as_str()).copied().unwrap_or("").to_string();
203+
let char_len = cell.chars().count();
204+
if char_len > widths[i] {
205+
widths[i] = char_len;
167206
}
168-
// Cap column width at 60
207+
// Cap column width at 60 chars
169208
if widths[i] > 60 {
170209
widths[i] = 60;
171210
}
@@ -191,18 +230,23 @@ fn format_array_as_table(arr: &[Value], emit_header: bool) -> String {
191230
let _ = writeln!(output, "{}", sep.join(" "));
192231
}
193232

194-
// Rows
233+
// Rows — truncate by char count to avoid panicking on multi-byte UTF-8.
195234
for row in &rows {
196235
let cells: Vec<String> = row
197236
.iter()
198237
.enumerate()
199238
.map(|(i, c)| {
200-
let truncated = if c.len() > widths[i] {
201-
format!("{}…", &c[..widths[i] - 1])
239+
let char_len = c.chars().count();
240+
let truncated = if char_len > widths[i] {
241+
// Safe char-boundary slice: take widths[i]-1 chars, then append ellipsis.
242+
let truncated_str: String = c.chars().take(widths[i] - 1).collect();
243+
format!("{truncated_str}…")
202244
} else {
203245
c.clone()
204246
};
205-
format!("{:width$}", truncated, width = widths[i])
247+
// Pad to column width (by char count)
248+
let pad = widths[i].saturating_sub(truncated.chars().count());
249+
format!("{truncated}{}", " ".repeat(pad))
206250
})
207251
.collect();
208252
let _ = writeln!(output, "{}", cells.join(" "));
@@ -404,6 +448,59 @@ mod tests {
404448
assert!(output.contains("abc"));
405449
}
406450

451+
#[test]
fn test_format_table_nested_object_flattened() {
    // A single object with nested maps must render dot-notation rows,
    // never serialized JSON sub-objects.
    let input = json!({
        "user": {
            "displayName": "Alice",
            "emailAddress": "alice@example.com"
        },
        "storageQuota": {
            "limit": "1000",
            "usage": "500"
        }
    });
    let output = format_value(&input, &OutputFormat::Table);
    // Dot-notation keys and their leaf values must be present.
    for flattened_key in ["user.displayName", "user.emailAddress"] {
        assert!(output.contains(flattened_key), "expected flattened key in output:\n{output}");
    }
    assert!(output.contains("Alice"), "expected value in output:\n{output}");
    // The nested object must not leak through as a raw JSON blob.
    assert!(!output.contains("{\"displayName"), "should not have raw JSON blob:\n{output}");
}
472+
473+
#[test]
fn test_format_table_nested_objects_in_array() {
    // Arrays of objects get per-row flattening into shared columns.
    let rows = json!([
        {"id": "1", "owner": {"name": "Alice"}},
        {"id": "2", "owner": {"name": "Bob"}}
    ]);
    let output = format_value(&rows, &OutputFormat::Table);
    assert!(output.contains("owner.name"), "expected flattened column:\n{output}");
    assert!(output.contains("Alice"), "expected value:\n{output}");
    assert!(output.contains("Bob"), "expected value:\n{output}");
}
484+
485+
#[test]
fn test_format_table_multibyte_truncation_does_not_panic() {
    // The 60-char column cap forces truncation; a byte-index slice would
    // land mid-codepoint on 4-byte emoji and panic, so this must complete.
    let cell = "😀".repeat(70); // each emoji is 4 bytes in UTF-8
    let output = format_value(&json!([{"col": cell}]), &OutputFormat::Table);
    assert!(output.contains("col"), "column name must appear:\n{output}");
}
495+
496+
#[test]
fn test_format_table_multibyte_exact_boundary() {
    // Two-byte accented characters scattered through a short value must
    // neither panic nor garble the rendered table.
    let output = format_value(&json!([{"name": "café résumé naïve"}]), &OutputFormat::Table);
    assert!(output.contains("name"), "column must appear:\n{output}");
}
503+
407504
#[test]
408505
fn test_format_csv() {
409506
let val = json!({

0 commit comments

Comments
 (0)