fix(path-expanding): overwrite with new value

Author: Johann Schopplich
Date: 2025-11-10 10:51:12 +01:00
parent eefb0242e2
commit 89b227302a
10 changed files with 200 additions and 95 deletions
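In effect, non-strict path expansion now resolves conflicts by overwriting with the most recent value instead of failing, and dotted keys that were quoted in the TOON source stay literal. A rough sketch of the intended decoder behavior follows; the option names are taken from the resolved options further down in this diff, while the package import path and TOON snippets are illustrative assumptions:

```ts
import { decode } from '@toon-format/toon' // package import path assumed

// Unquoted dotted keys expand into nested objects:
decode('user.name: Ada', { expandPaths: 'safe' })
// → { user: { name: 'Ada' } }

// Quoted dotted keys are tracked via QUOTED_KEY_MARKER and stay literal:
decode('"user.name": Ada', { expandPaths: 'safe' })
// → { 'user.name': 'Ada' }

// Conflict between an expanded path and a literal sibling key:
decode('user.name: Ada\nuser: admin', { expandPaths: 'safe', strict: false })
// → { user: 'admin' }   (non-strict: overwrite with new value, last-write-wins)
// With strict: true the same input throws a TypeError("Path expansion conflict at key ...")
```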

View File

@@ -38,6 +38,6 @@
     "test": "vitest"
   },
   "devDependencies": {
-    "@toon-format/spec": "^1.4.0"
+    "@toon-format/spec": "^1.5.2"
   }
 }

View File

@@ -1,7 +1,9 @@
 import type { ArrayHeaderInfo, Depth, JsonArray, JsonObject, JsonPrimitive, JsonValue, ParsedLine, ResolvedDecodeOptions } from '../types'
+import type { ObjectWithQuotedKeys } from './expand'
 import type { LineCursor } from './scanner'
-import { COLON, DEFAULT_DELIMITER, LIST_ITEM_PREFIX } from '../constants'
+import { COLON, DEFAULT_DELIMITER, DOT, LIST_ITEM_PREFIX } from '../constants'
 import { findClosingQuote } from '../shared/string-utils'
+import { QUOTED_KEY_MARKER } from './expand'
 import { isArrayHeaderAfterHyphen, isObjectFirstFieldAfterHyphen, mapRowValuesToPrimitives, parseArrayHeaderLine, parseDelimitedValues, parseKeyToken, parsePrimitiveToken } from './parser'
 import { assertExpectedCount, validateNoBlankLinesInRange, validateNoExtraListItems, validateNoExtraTabularRows } from './validation'
@@ -55,6 +57,7 @@ function isKeyValueLine(line: ParsedLine): boolean {
 function decodeObject(cursor: LineCursor, baseDepth: Depth, options: ResolvedDecodeOptions): JsonObject {
   const obj: JsonObject = {}
+  const quotedKeys: Set<string> = new Set()
 
   // Detect the actual depth of the first field (may differ from baseDepth in nested structures)
   let computedDepth: Depth | undefined
@@ -70,8 +73,13 @@ function decodeObject(cursor: LineCursor, baseDepth: Depth, options: ResolvedDecodeOptions): JsonObject {
     }
     if (line.depth === computedDepth) {
-      const [key, value] = decodeKeyValuePair(line, cursor, computedDepth, options)
+      const [key, value, isQuoted] = decodeKeyValuePair(line, cursor, computedDepth, options)
       obj[key] = value
+
+      // Track quoted dotted keys for expansion phase
+      if (isQuoted && key.includes(DOT)) {
+        quotedKeys.add(key)
+      }
     }
     else {
       // Different depth (shallower or deeper) - stop object parsing
@@ -79,6 +87,11 @@ function decodeObject(cursor: LineCursor, baseDepth: Depth, options: ResolvedDecodeOptions): JsonObject {
     }
   }
 
+  // Attach quoted key metadata if any were found
+  if (quotedKeys.size > 0) {
+    (obj as ObjectWithQuotedKeys)[QUOTED_KEY_MARKER] = quotedKeys
+  }
+
   return obj
 }
@@ -87,21 +100,22 @@ function decodeKeyValue(
   cursor: LineCursor,
   baseDepth: Depth,
   options: ResolvedDecodeOptions,
-): { key: string, value: JsonValue, followDepth: Depth } {
+): { key: string, value: JsonValue, followDepth: Depth, isQuoted: boolean } {
   // Check for array header first (before parsing key)
   const arrayHeader = parseArrayHeaderLine(content, DEFAULT_DELIMITER)
   if (arrayHeader && arrayHeader.header.key) {
-    const value = decodeArrayFromHeader(arrayHeader.header, arrayHeader.inlineValues, cursor, baseDepth, options)
+    const decodedValue = decodeArrayFromHeader(arrayHeader.header, arrayHeader.inlineValues, cursor, baseDepth, options)
     // After an array, subsequent fields are at baseDepth + 1 (where array content is)
     return {
       key: arrayHeader.header.key,
-      value,
+      value: decodedValue,
       followDepth: baseDepth + 1,
+      isQuoted: false, // Array keys parsed separately in `parseArrayHeaderLine`
     }
   }
 
   // Regular key-value pair
-  const { key, end } = parseKeyToken(content, 0)
+  const { key, end, isQuoted } = parseKeyToken(content, 0)
   const rest = content.slice(end).trim()
 
   // No value after colon - expect nested object or empty
@@ -109,15 +123,15 @@ function decodeKeyValue(
     const nextLine = cursor.peek()
     if (nextLine && nextLine.depth > baseDepth) {
       const nested = decodeObject(cursor, baseDepth + 1, options)
-      return { key, value: nested, followDepth: baseDepth + 1 }
+      return { key, value: nested, followDepth: baseDepth + 1, isQuoted }
     }
 
     // Empty object
-    return { key, value: {}, followDepth: baseDepth + 1 }
+    return { key, value: {}, followDepth: baseDepth + 1, isQuoted }
   }
 
   // Inline primitive value
-  const value = parsePrimitiveToken(rest)
-  return { key, value, followDepth: baseDepth + 1 }
+  const decodedValue = parsePrimitiveToken(rest)
+  return { key, value: decodedValue, followDepth: baseDepth + 1, isQuoted }
 }
 
 function decodeKeyValuePair(
@@ -125,10 +139,10 @@ function decodeKeyValuePair(
   cursor: LineCursor,
   baseDepth: Depth,
   options: ResolvedDecodeOptions,
-): [key: string, value: JsonValue] {
+): [key: string, value: JsonValue, isQuoted: boolean] {
   cursor.advance()
-  const { key, value } = decodeKeyValue(line.content, cursor, baseDepth, options)
-  return [key, value]
+  const { key, value, isQuoted } = decodeKeyValue(line.content, cursor, baseDepth, options)
+  return [key, value, isQuoted]
 }
 
 // #endregion
@@ -364,9 +378,15 @@ function decodeObjectFromListItem(
   options: ResolvedDecodeOptions,
 ): JsonObject {
   const afterHyphen = firstLine.content.slice(LIST_ITEM_PREFIX.length)
-  const { key, value, followDepth } = decodeKeyValue(afterHyphen, cursor, baseDepth, options)
+  const { key, value, followDepth, isQuoted } = decodeKeyValue(afterHyphen, cursor, baseDepth, options)
   const obj: JsonObject = { [key]: value }
+  const quotedKeys: Set<string> = new Set()
+
+  // Track if first key was quoted and dotted
+  if (isQuoted && key.includes(DOT)) {
+    quotedKeys.add(key)
+  }
 
   // Read subsequent fields
   while (!cursor.atEnd()) {
@@ -376,14 +396,24 @@ function decodeObjectFromListItem(
     }
     if (line.depth === followDepth && !line.content.startsWith(LIST_ITEM_PREFIX)) {
-      const [k, v] = decodeKeyValuePair(line, cursor, followDepth, options)
+      const [k, v, kIsQuoted] = decodeKeyValuePair(line, cursor, followDepth, options)
       obj[k] = v
+
+      // Track quoted dotted keys
+      if (kIsQuoted && k.includes(DOT)) {
+        quotedKeys.add(k)
+      }
     }
     else {
       break
     }
   }
 
+  // Attach quoted key metadata if any were found
+  if (quotedKeys.size > 0) {
+    (obj as ObjectWithQuotedKeys)[QUOTED_KEY_MARKER] = quotedKeys
+  }
+
  return obj
 }

View File

@@ -5,6 +5,19 @@ import { isIdentifierSegment } from '../shared/validation'
 
 // #region Path expansion (safe)
 
+/**
+ * Symbol used to mark object keys that were originally quoted in the TOON source.
+ * Quoted dotted keys should not be expanded, even if they meet expansion criteria.
+ */
+export const QUOTED_KEY_MARKER: unique symbol = Symbol('quotedKey')
+
+/**
+ * Type for objects that may have quoted key metadata attached.
+ */
+export interface ObjectWithQuotedKeys extends JsonObject {
+  [QUOTED_KEY_MARKER]?: Set<string>
+}
+
 /**
  * Checks if two values can be merged (both are plain objects).
  */
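A quick sketch of the hand-off this marker enables; the object here is hand-built for illustration, whereas in practice decodeObject attaches the set:

```ts
// Hypothetical hand-off: decodeObject attaches the set of originally quoted
// dotted keys, and expandPathsSafe consumes it and leaves those keys literal.
const decoded: ObjectWithQuotedKeys = { 'a.b': 1 }
decoded[QUOTED_KEY_MARKER] = new Set(['a.b'])

// expandPathsSafe later checks quotedKeys?.has('a.b') and keeps the key
// as-is instead of expanding it into { a: { b: 1 } }. Because the expanded
// result is rebuilt from Object.keys(), the symbol does not leak to callers.
```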
@@ -41,30 +54,59 @@ export function expandPathsSafe(value: JsonValue, strict: boolean): JsonValue {
   }
   if (isJsonObject(value)) {
-    const result: JsonObject = {}
+    const expandedObject: JsonObject = {}
     const keys = Object.keys(value)
-    for (const key of keys) {
-      const val = value[key]!
-      // Check if key contains dots
-      if (key.includes(DOT)) {
+
+    // Check if this object has quoted key metadata
+    const quotedKeys = (value as ObjectWithQuotedKeys)[QUOTED_KEY_MARKER]
+
+    for (const key of keys) {
+      const keyValue = value[key]!
+
+      // Skip expansion for keys that were originally quoted
+      const isQuoted = quotedKeys?.has(key)
+
+      // Check if key contains dots and should be expanded
+      if (key.includes(DOT) && !isQuoted) {
         const segments = key.split(DOT)
         // Validate all segments are identifiers
         if (segments.every(seg => isIdentifierSegment(seg))) {
           // Expand this dotted key
-          const expandedValue = expandPathsSafe(val, strict)
-          insertPathSafe(result, segments, expandedValue, strict)
+          const expandedValue = expandPathsSafe(keyValue, strict)
+          insertPathSafe(expandedObject, segments, expandedValue, strict)
           continue
         }
       }
       // Not expandable - keep as literal key, but still recursively expand the value
-      result[key] = expandPathsSafe(val, strict)
+      const expandedValue = expandPathsSafe(keyValue, strict)
+
+      // Check for conflicts with already-expanded keys
+      if (key in expandedObject) {
+        const conflictingValue = expandedObject[key]!
+        // If both are objects, try to merge them
+        if (canMerge(conflictingValue, expandedValue)) {
+          mergeObjects(conflictingValue as JsonObject, expandedValue as JsonObject, strict)
+        }
+        else {
+          // Conflict: incompatible types
+          if (strict) {
+            throw new TypeError(
+              `Path expansion conflict at key "${key}": cannot merge ${typeof conflictingValue} with ${typeof expandedValue}`,
+            )
+          }
+          // Non-strict: overwrite (LWW)
+          expandedObject[key] = expandedValue
+        }
+      }
+      else {
+        // No conflict - insert directly
+        expandedObject[key] = expandedValue
+      }
     }
-    return result
+    return expandedObject
   }
   // Primitive value - return as-is
@@ -80,7 +122,7 @@ export function expandPathsSafe(value: JsonValue, strict: boolean): JsonValue {
* - If both are objects: deep merge (continue insertion) * - If both are objects: deep merge (continue insertion)
* - If values differ: conflict * - If values differ: conflict
* - strict=true: throw TypeError * - strict=true: throw TypeError
* - strict=false: overwrite with new value (last-wins) * - strict=false: overwrite with new value (LWW)
* *
* @param target - The object to insert into * @param target - The object to insert into
* @param segments - Array of path segments (e.g., ['data', 'metadata', 'items']) * @param segments - Array of path segments (e.g., ['data', 'metadata', 'items'])
@@ -94,58 +136,58 @@ function insertPathSafe(
   value: JsonValue,
   strict: boolean,
 ): void {
-  let current: JsonObject = target
+  let currentNode: JsonObject = target
 
   // Walk to the penultimate segment, creating objects as needed
   for (let i = 0; i < segments.length - 1; i++) {
     const seg = segments[i]!
-    const existing = current[seg]
-    if (existing === undefined) {
+    const segmentValue = currentNode[seg]
+    if (segmentValue === undefined) {
       // Create new intermediate object
       const newObj: JsonObject = {}
-      current[seg] = newObj
-      current = newObj
+      currentNode[seg] = newObj
+      currentNode = newObj
     }
-    else if (isJsonObject(existing)) {
+    else if (isJsonObject(segmentValue)) {
       // Continue into existing object
-      current = existing
+      currentNode = segmentValue
     }
     else {
       // Conflict: existing value is not an object
       if (strict) {
         throw new TypeError(
-          `Path expansion conflict at segment "${seg}": expected object but found ${typeof existing}`,
+          `Path expansion conflict at segment "${seg}": expected object but found ${typeof segmentValue}`,
         )
       }
       // Non-strict: overwrite with new object
       const newObj: JsonObject = {}
-      current[seg] = newObj
-      current = newObj
+      currentNode[seg] = newObj
+      currentNode = newObj
     }
   }
 
   // Insert at the final segment
   const lastSeg = segments[segments.length - 1]!
-  const existing = current[lastSeg]
-  if (existing === undefined) {
+  const destinationValue = currentNode[lastSeg]
+  if (destinationValue === undefined) {
     // No conflict - insert directly
-    current[lastSeg] = value
+    currentNode[lastSeg] = value
   }
-  else if (canMerge(existing, value)) {
+  else if (canMerge(destinationValue, value)) {
     // Both are objects - deep merge
-    mergeObjects(existing as JsonObject, value as JsonObject, strict)
+    mergeObjects(destinationValue as JsonObject, value as JsonObject, strict)
   }
   else {
     // Conflict: incompatible types
     if (strict) {
       throw new TypeError(
-        `Path expansion conflict at key "${lastSeg}": cannot merge ${typeof existing} with ${typeof value}`,
+        `Path expansion conflict at key "${lastSeg}": cannot merge ${typeof destinationValue} with ${typeof value}`,
       )
     }
     // Non-strict: overwrite (LWW)
-    current[lastSeg] = value
+    currentNode[lastSeg] = value
   }
 }
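Taken together, the conflict rules are: merge when both sides are plain objects, otherwise throw in strict mode and overwrite (last-write-wins) when non-strict. A minimal sketch exercising the internal helper directly; the module path is assumed, and mergeObjects is assumed to deep-merge as its name and the comments indicate:

```ts
import { expandPathsSafe } from './expand' // internal module path assumed

// The dotted key expands first, then the literal sibling 'a' is processed:
expandPathsSafe({ 'a.b': 1, a: 'x' }, false) // → { a: 'x' }  (non-strict: overwrite, LWW)
expandPathsSafe({ 'a.b': 1, a: 'x' }, true)  // throws TypeError: cannot merge object with string

// When both sides are plain objects they are merged instead of replaced:
expandPathsSafe({ 'a.b': 1, a: { c: 2 } }, false) // → { a: { b: 1, c: 2 } }
```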

View File

@@ -146,7 +146,7 @@ export function parseBracketSegment(
 export function parseDelimitedValues(input: string, delimiter: Delimiter): string[] {
   const values: string[] = []
-  let current = ''
+  let valueBuffer = ''
   let inQuotes = false
   let i = 0
@@ -155,32 +155,32 @@ export function parseDelimitedValues(input: string, delimiter: Delimiter): string[] {
     if (char === BACKSLASH && i + 1 < input.length && inQuotes) {
       // Escape sequence in quoted string
-      current += char + input[i + 1]
+      valueBuffer += char + input[i + 1]
       i += 2
       continue
     }
 
     if (char === DOUBLE_QUOTE) {
       inQuotes = !inQuotes
-      current += char
+      valueBuffer += char
       i++
       continue
     }
 
     if (char === delimiter && !inQuotes) {
-      values.push(current.trim())
-      current = ''
+      values.push(valueBuffer.trim())
+      valueBuffer = ''
       i++
       continue
     }
 
-    current += char
+    valueBuffer += char
     i++
   }
 
   // Add last value
-  if (current || values.length > 0) {
-    values.push(current.trim())
+  if (valueBuffer || values.length > 0) {
+    values.push(valueBuffer.trim())
   }
 
   return values
@@ -292,12 +292,12 @@ export function parseQuotedKey(content: string, start: number): { key: string, end: number } {
   return { key, end }
 }
 
-export function parseKeyToken(content: string, start: number): { key: string, end: number } {
+export function parseKeyToken(content: string, start: number): { key: string, end: number, isQuoted: boolean } {
   if (content[start] === DOUBLE_QUOTE) {
-    return parseQuotedKey(content, start)
+    return { ...parseQuotedKey(content, start), isQuoted: true }
   }
   else {
-    return parseUnquotedKey(content, start)
+    return { ...parseUnquotedKey(content, start), isQuoted: false }
  }
 }
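For orientation, the widened token shape; the exact end offsets depend on the underlying key parsers, so they are left abstract here:

```ts
// The quoted form strips the quotes, so both calls report the same key text;
// `end` is wherever the caller should resume scanning (not spelled out in this diff).
parseKeyToken('"a.b": 1', 0) // → { key: 'a.b', end: …, isQuoted: true }
parseKeyToken('a.b: 1', 0)   // → { key: 'a.b', end: …, isQuoted: false }
```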

View File

@@ -1,5 +1,5 @@
 import type { Depth, JsonArray, JsonObject, JsonPrimitive, JsonValue, ResolvedEncodeOptions } from '../types'
-import { LIST_ITEM_MARKER } from '../constants'
+import { DOT, LIST_ITEM_MARKER } from '../constants'
 import { tryFoldKeyChain } from './folding'
 import { isArrayOfArrays, isArrayOfObjects, isArrayOfPrimitives, isJsonArray, isJsonObject, isJsonPrimitive } from './normalize'
 import { encodeAndJoinPrimitives, encodeKey, encodePrimitive, formatHeader } from './primitives'
@@ -28,21 +28,31 @@ export function encodeValue(value: JsonValue, options: ResolvedEncodeOptions): string {
 // #region Object encoding
 
-export function encodeObject(value: JsonObject, writer: LineWriter, depth: Depth, options: ResolvedEncodeOptions): void {
+export function encodeObject(value: JsonObject, writer: LineWriter, depth: Depth, options: ResolvedEncodeOptions, rootLiteralKeys?: Set<string>, pathPrefix?: string, remainingDepth?: number): void {
   const keys = Object.keys(value)
+
+  // At root level (depth 0), collect all literal dotted keys for collision checking
+  if (depth === 0 && !rootLiteralKeys) {
+    rootLiteralKeys = new Set(keys.filter(k => k.includes('.')))
+  }
+
+  const effectiveFlattenDepth = remainingDepth ?? options.flattenDepth
+
   for (const key of keys) {
-    encodeKeyValuePair(key, value[key]!, writer, depth, options, keys)
+    encodeKeyValuePair(key, value[key]!, writer, depth, options, keys, rootLiteralKeys, pathPrefix, effectiveFlattenDepth)
   }
 }
 
-export function encodeKeyValuePair(key: string, value: JsonValue, writer: LineWriter, depth: Depth, options: ResolvedEncodeOptions, siblings?: readonly string[]): void {
+export function encodeKeyValuePair(key: string, value: JsonValue, writer: LineWriter, depth: Depth, options: ResolvedEncodeOptions, siblings?: readonly string[], rootLiteralKeys?: Set<string>, pathPrefix?: string, flattenDepth?: number): void {
+  const currentPath = pathPrefix ? `${pathPrefix}${DOT}${key}` : key
+  const effectiveFlattenDepth = flattenDepth ?? options.flattenDepth
+
   // Attempt key folding when enabled
   if (options.keyFolding === 'safe' && siblings) {
-    const foldResult = tryFoldKeyChain(key, value, siblings, options)
+    const foldResult = tryFoldKeyChain(key, value, siblings, options, rootLiteralKeys, pathPrefix, effectiveFlattenDepth)
     if (foldResult) {
-      const { foldedKey, remainder, leafValue } = foldResult
+      const { foldedKey, remainder, leafValue, segmentCount } = foldResult
       const encodedFoldedKey = encodeKey(foldedKey)
 
       // Case 1: Fully folded to a leaf value
@@ -65,7 +75,10 @@ export function encodeKeyValuePair(key: string, value: JsonValue, writer: LineWriter, depth: Depth, options: ResolvedEncodeOptions, siblings?: readonly string[]): void {
       // Case 2: Partially folded with a tail object
       if (isJsonObject(remainder)) {
         writer.push(depth, `${encodedFoldedKey}:`)
-        encodeObject(remainder, writer, depth + 1, options)
+        // Calculate remaining depth budget (subtract segments already folded)
+        const remainingDepth = effectiveFlattenDepth - segmentCount
+        const foldedPath = pathPrefix ? `${pathPrefix}${DOT}${foldedKey}` : foldedKey
+        encodeObject(remainder, writer, depth + 1, options, rootLiteralKeys, foldedPath, remainingDepth)
         return
       }
     }
@@ -88,7 +101,7 @@ export function encodeKeyValuePair(key: string, value: JsonValue, writer: LineWriter, depth: Depth, options: ResolvedEncodeOptions, siblings?: readonly string[]): void {
     }
     else {
       writer.push(depth, `${encodedKey}:`)
-      encodeObject(value, writer, depth + 1, options)
+      encodeObject(value, writer, depth + 1, options, rootLiteralKeys, currentPath, effectiveFlattenDepth)
     }
   }
 }

View File

@@ -24,6 +24,11 @@ export interface FoldResult {
    * Used to avoid redundant traversal when encoding the folded value.
    */
   leafValue: JsonValue
+  /**
+   * The number of segments that were folded.
+   * Used to calculate remaining depth budget for nested encoding.
+   */
+  segmentCount: number
 }
 
 /**
@@ -55,6 +60,9 @@ export function tryFoldKeyChain(
   value: JsonValue,
   siblings: readonly string[],
   options: ResolvedEncodeOptions,
+  rootLiteralKeys?: Set<string>,
+  pathPrefix?: string,
+  flattenDepth?: number,
 ): FoldResult | undefined {
   // Only fold when safe mode is enabled
   if (options.keyFolding !== 'safe') {
@@ -66,8 +74,11 @@ export function tryFoldKeyChain(
     return undefined
   }
 
+  // Use provided flattenDepth or fall back to options default
+  const effectiveFlattenDepth = flattenDepth ?? options.flattenDepth
+
   // Collect the chain of single-key objects
-  const { segments, tail, leafValue } = collectSingleKeyChain(key, value, options.flattenDepth)
+  const { segments, tail, leafValue } = collectSingleKeyChain(key, value, effectiveFlattenDepth)
 
   // Need at least 2 segments for folding to be worthwhile
   if (segments.length < 2) {
@@ -79,18 +90,27 @@ export function tryFoldKeyChain(
     return undefined
   }
 
-  // Build the folded key
+  // Build the folded key (relative to current nesting level)
   const foldedKey = buildFoldedKey(segments)
 
-  // Check for collision with existing literal sibling keys (inline check)
+  // Build the absolute path from root
+  const absolutePath = pathPrefix ? `${pathPrefix}${DOT}${foldedKey}` : foldedKey
+
+  // Check for collision with existing literal sibling keys (at current level)
   if (siblings.includes(foldedKey)) {
     return undefined
   }
 
+  // Check for collision with root-level literal dotted keys
+  if (rootLiteralKeys && rootLiteralKeys.has(absolutePath)) {
+    return undefined
+  }
+
   return {
     foldedKey,
     remainder: tail,
     leafValue,
+    segmentCount: segments.length,
   }
 }
@@ -116,15 +136,15 @@ function collectSingleKeyChain(
   maxDepth: number,
 ): { segments: string[], tail: JsonValue | undefined, leafValue: JsonValue } {
   const segments: string[] = [startKey]
-  let current = startValue
+  let currentValue = startValue
 
   while (segments.length < maxDepth) {
     // Must be an object to continue
-    if (!isJsonObject(current)) {
+    if (!isJsonObject(currentValue)) {
       break
     }
 
-    const keys = Object.keys(current)
+    const keys = Object.keys(currentValue)
     // Must have exactly one key to continue the chain
     if (keys.length !== 1) {
@@ -132,32 +152,32 @@ function collectSingleKeyChain(
     }
 
     const nextKey = keys[0]!
-    const nextValue = current[nextKey]!
+    const nextValue = currentValue[nextKey]!
     segments.push(nextKey)
-    current = nextValue
+    currentValue = nextValue
   }
 
   // Determine the tail - simplified with early returns
-  if (!isJsonObject(current)) {
+  if (!isJsonObject(currentValue)) {
     // Array, primitive, or null - this is a leaf value
-    return { segments, tail: undefined, leafValue: current }
+    return { segments, tail: undefined, leafValue: currentValue }
   }
 
-  const keys = Object.keys(current)
+  const keys = Object.keys(currentValue)
   if (keys.length === 0) {
     // Empty object is a leaf
-    return { segments, tail: undefined, leafValue: current }
+    return { segments, tail: undefined, leafValue: currentValue }
   }
 
   if (keys.length === 1 && segments.length === maxDepth) {
     // Hit depth limit with remaining chain
-    return { segments, tail: current, leafValue: current }
+    return { segments, tail: currentValue, leafValue: currentValue }
   }
 
   // Multi-key object is the remainder
-  return { segments, tail: current, leafValue: current }
+  return { segments, tail: currentValue, leafValue: currentValue }
 }
 
 /**
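To illustrate what the threaded pathPrefix/rootLiteralKeys buy, a rough sketch; the public encode entry point, the option names, and the emitted layout are assumptions based on the resolved options used above:

```ts
import { encode } from '@toon-format/toon' // package entry point assumed

// A single-key chain folds into one dotted key under safe key folding
// (assuming the default flattenDepth permits three segments):
encode({ a: { b: { c: 1 } } }, { keyFolding: 'safe' })
// expected to emit: a.b.c: 1

// New in this commit: the collision guard also applies below the root level.
// Folding `b.c` inside `a` would reproduce the literal root key 'a.b.c'
// (absolutePath === 'a.b.c' is in rootLiteralKeys), so that chain stays unfolded:
encode({ 'a.b.c': 1, a: { b: { c: 2 }, other: 3 } }, { keyFolding: 'safe' })
```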

View File

@@ -58,15 +58,15 @@ export function normalizeValue(value: unknown): JsonValue {
 
   // Plain object
   if (isPlainObject(value)) {
-    const result: Record<string, JsonValue> = {}
+    const normalized: Record<string, JsonValue> = {}
     for (const key in value) {
       if (Object.prototype.hasOwnProperty.call(value, key)) {
-        result[key] = normalizeValue(value[key])
+        normalized[key] = normalizeValue(value[key])
       }
     }
-    return result
+    return normalized
   }
 
   // Fallback: function, symbol, undefined, or other → null
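The rename does not change behavior; own enumerable properties are normalized recursively and non-JSON leaves fall back to null. Calling the internal helper directly here is just for illustration:

```ts
// undefined and function values have no JSON representation → null per the fallback above
normalizeValue({ id: 1, skip: undefined, run: () => {} })
// → { id: 1, skip: null, run: null }
```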

View File

@@ -35,14 +35,14 @@ export function decode(input: string, options?: DecodeOptions): JsonValue {
   }
 
   const cursor = new LineCursor(scanResult.lines, scanResult.blankLines)
-  const value = decodeValueFromLines(cursor, resolvedOptions)
+  const decodedValue = decodeValueFromLines(cursor, resolvedOptions)
 
   // Apply path expansion if enabled
   if (resolvedOptions.expandPaths === 'safe') {
-    return expandPathsSafe(value, resolvedOptions.strict)
+    return expandPathsSafe(decodedValue, resolvedOptions.strict)
   }
 
-  return value
+  return decodedValue
 }
 
 function resolveOptions(options?: EncodeOptions): ResolvedEncodeOptions {

View File

@@ -22,7 +22,7 @@ export function escapeString(value: string): string {
  * Handles `\n`, `\t`, `\r`, `\\`, and `\"` escape sequences.
  */
 export function unescapeString(value: string): string {
-  let result = ''
+  let unescaped = ''
   let i = 0
 
   while (i < value.length) {
@@ -33,27 +33,27 @@ export function unescapeString(value: string): string {
       const next = value[i + 1]
       if (next === 'n') {
-        result += NEWLINE
+        unescaped += NEWLINE
         i += 2
         continue
       }
       if (next === 't') {
-        result += TAB
+        unescaped += TAB
        i += 2
        continue
      }
      if (next === 'r') {
-        result += CARRIAGE_RETURN
+        unescaped += CARRIAGE_RETURN
        i += 2
        continue
      }
      if (next === BACKSLASH) {
-        result += BACKSLASH
+        unescaped += BACKSLASH
        i += 2
        continue
      }
      if (next === DOUBLE_QUOTE) {
-        result += DOUBLE_QUOTE
+        unescaped += DOUBLE_QUOTE
        i += 2
        continue
      }
@@ -61,11 +61,11 @@ export function unescapeString(value: string): string {
       throw new SyntaxError(`Invalid escape sequence: \\${next}`)
     }
 
-    result += value[i]
+    unescaped += value[i]
     i++
   }
 
-  return result
+  return unescaped
 }
 
 /**
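The rename is purely cosmetic; the shared helper still resolves the five supported escapes and throws on anything else:

```ts
// \n, \t, \r, \\ and \" are unescaped; any other sequence raises SyntaxError
unescapeString(String.raw`line1\nline2\t\"quoted\"`)
// → 'line1\nline2\t"quoted"' with a real newline, a real tab, and plain quotes
```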

pnpm-lock.yaml (generated)
View File

@@ -102,8 +102,8 @@ importers:
   packages/toon:
     devDependencies:
       '@toon-format/spec':
-        specifier: ^1.4.0
-        version: 1.4.0
+        specifier: ^1.5.2
+        version: 1.5.2
 
 packages:
 
@@ -833,8 +833,8 @@
     peerDependencies:
       eslint: '>=9.0.0'
 
-  '@toon-format/spec@1.4.0':
-    resolution: {integrity: sha512-SSI+mJ0PJW38A0n7JdnMjKEkXoecYAQHz7UG/Rl83mbwi5i0JcKeHIToLS+Q04OQZGlu9bt2Jzq5t+SaiMdsMg==}
+  '@toon-format/spec@1.5.2':
+    resolution: {integrity: sha512-PNEIbKQeW5dp/Q+v2wxDlLmxYz3zeIg4qBXUpx9DFGL98yMjUxQSSwpXTITyPgRxCynpksuOJZexTFVdAUugeQ==}
 
   '@tybys/wasm-util@0.10.1':
     resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==}
 
@@ -3042,7 +3042,7 @@ snapshots:
     estraverse: 5.3.0
     picomatch: 4.0.3
 
-  '@toon-format/spec@1.4.0': {}
+  '@toon-format/spec@1.5.2': {}
 
   '@tybys/wasm-util@0.10.1':
     dependencies: