Documentation
¶
Index ¶
- Constants
- Variables
- func ArrayItemKey(item any) string
- func BatchIntern(strings []string) []string
- func BatchInternKeys(keys []string) []string
- func ChunkArrayOptimized(arr []any, chunkSize int) [][]any
- func CleanupNullValues(data any, compactArrays bool) any
- func CompactArrayOptimized(arr []any) []any
- func ContainsAnyByte(s, chars string) bool
- func ConvertNumbersToFloat(data any) any
- func CreatePathError(path string, operation string, err error) error
- func DeepMerge(base, override any) any
- func DeepMergeWithMode(base, override any, mode MergeMode) any
- func EncodeFast(v any, buf *bytes.Buffer) bool
- func EscapeJSONPointer(s string) string
- func FastMarshal(v any) ([]byte, error)
- func FastMarshalToString(v any) (string, error)
- func FastParseFloat(b []byte) (float64, error)
- func FastParseInt(b []byte) (int64, error)
- func FilterArrayOptimized(arr []any, predicate func(any) bool) []any
- func FlattenArrayOptimized(arr []any) []any
- func FormatNumberForDedup(f float64) string
- func GetByteSlice() *[]byte
- func GetEncoderBuffer() *bytes.Buffer
- func GetErrorSlice() *[]error
- func GetFastBuffer() *bytes.Buffer
- func GetFlattenedSlice() *[]any
- func GetPathSegmentSlice(hint int) *[]PathSegment
- func GetPooledSlice() *[]any
- func GetResultsSlice(hint int) *[]any
- func GetSafeArrayElement(arr []any, index int) (any, bool)
- func GetStreamingMap(hint int) map[string]any
- func GetStreamingSlice(hint int) *[]any
- func GetStringBuilder() *strings.Builder
- func HTMLEscape(s string) string
- func HTMLEscapeTo(dst *bytes.Buffer, s string)
- func HasComplexSegments(segments []PathSegment) bool
- func HashBool(h uint64, v bool) uint64
- func HashBytesFNV1a(b []byte) uint64
- func HashBytesFNV1aSecure(b []byte) uint64
- func HashInt(h uint64, v int) uint64
- func HashInt64(h uint64, v int64) uint64
- func HashString(h uint64, s string) uint64
- func HashStringFNV1a(s string) uint64
- func HashStringFNV1aSampled(s string) uint64
- func HashStringFNV1aSecure(s string) uint64
- func HashUint64(h, v uint64) uint64
- func IndexIgnoreCase(s, pattern string) int
- func IntToStringFast(n int) string
- func InternKey(key string) string
- func InternKeyBytes(b []byte) string
- func InternString(s string) string
- func InternStringBytes(b []byte) string
- func IsArrayPath(path string) bool
- func IsArrayType(data any) bool
- func IsComplexPath(path string) bool
- func IsDigit(c byte) bool
- func IsDotNotationPath(path string) bool
- func IsExtractionPath(path string) bool
- func IsExtractionSegment(segment PathSegment) bool
- func IsJSONArray(data any) bool
- func IsJSONObject(data any) bool
- func IsJSONPointerPath(path string) bool
- func IsJSONPrimitive(data any) bool
- func IsMatchPatternIgnoreCase(s, pattern string) bool
- func IsNilOrEmpty(data any) bool
- func IsObjectType(data any) bool
- func IsSlicePath(path string) bool
- func IsSliceType(data any) bool
- func IsSpace(c byte) bool
- func IsValidArrayIndex(index string) bool
- func IsValidCacheKey(key string) bool
- func IsValidIndex(index, length int) bool
- func IsValidJSONNumber(s string) bool
- func IsValidJSONPrimitive(s string) bool
- func IsValidNumberString(s string) bool
- func IsValidPropertyName(name string) bool
- func IsValidSliceRange(rangeStr string) bool
- func IsValidUTF8(b []byte) bool
- func IsWordChar(c byte) bool
- func MapArrayOptimized(arr []any, transform func(any) any) []any
- func MarshalJSON(value any, pretty bool, prefix, indent string) (string, error)
- func MergeObjects(obj1, obj2 map[string]any) map[string]any
- func NeedsDotBefore(prevChar rune) bool
- func NeedsDotBeforeByte(prevChar byte) bool
- func NeedsHTMLEscape(s string) bool
- func NeedsPathPreprocessing(path string) bool
- func NewPathError(path, message string, err error) error
- func NormalizeIndex(index, length int) int
- func NormalizePathSeparators(path string) string
- func NormalizeSlice(start, end, length int) (int, int)
- func ParseAndValidateArrayIndex(s string, arrayLength int) (int, bool)
- func ParseArrayIndex(property string) (int, bool)
- func ParseIntFast(s string) (int, bool)
- func ParseSliceComponents(slicePart string) (start, end, step *int, err error)
- func PerformArraySlice(arr []any, start, end, step *int) []any
- func PreprocessPath(path string, sb *strings.Builder) string
- func PutByteSlice(b *[]byte)
- func PutByteSliceSecure(b *[]byte)
- func PutEncoder(e *FastEncoder)
- func PutEncoderBuffer(buf *bytes.Buffer)
- func PutEncoderBufferSecure(buf *bytes.Buffer)
- func PutErrorSlice(s *[]error)
- func PutFastBuffer(buf *bytes.Buffer)
- func PutFlattenedSlice(s *[]any)
- func PutPathSegmentSlice(s *[]PathSegment)
- func PutPooledSlice(s *[]any)
- func PutResultsSlice(s *[]any)
- func PutStreamingMap(m map[string]any)
- func PutStreamingSlice(s *[]any)
- func PutStringBuilder(sb *strings.Builder)
- func ReconstructPath(segments []PathSegment) string
- func ReverseArrayOptimized(arr []any)
- func StringToBytes(s string) []byte
- func TakeFirst(arr []any, n int) []any
- func TakeLast(arr []any, n int) []any
- func TryConvertToArray(m map[string]any) ([]any, bool)
- func UnescapeJSONPointer(s string) string
- func UniqueArrayOptimized(arr []any) []any
- func ValidatePath(path string) error
- func WrapError(err error, context string) error
- type CacheConfig
- type CacheManager
- type CacheStats
- type CheckResult
- type ChunkProcessor
- type CompiledPath
- func (cp *CompiledPath) Exists(data any) bool
- func (cp *CompiledPath) Get(data any) (any, error)
- func (cp *CompiledPath) GetFromRaw(raw []byte) (any, error)
- func (cp *CompiledPath) Hash() uint64
- func (cp *CompiledPath) IsEmpty() bool
- func (cp *CompiledPath) Len() int
- func (cp *CompiledPath) Path() string
- func (cp *CompiledPath) Release()
- func (cp *CompiledPath) Segments() []PathSegment
- func (cp *CompiledPath) String() string
- type CompiledPathCache
- type CompiledPathError
- type ExtractionGroup
- type FastEncoder
- func (e *FastEncoder) Bytes() []byte
- func (e *FastEncoder) EncodeArray(arr []any) error
- func (e *FastEncoder) EncodeBase64(b []byte)
- func (e *FastEncoder) EncodeBool(b bool)
- func (e *FastEncoder) EncodeFloat(n float64, bits int)
- func (e *FastEncoder) EncodeFloat32Slice(arr []float32)
- func (e *FastEncoder) EncodeFloatSlice(arr []float64)
- func (e *FastEncoder) EncodeInt(n int64)
- func (e *FastEncoder) EncodeInt32Slice(arr []int32)
- func (e *FastEncoder) EncodeInt64Slice(arr []int64)
- func (e *FastEncoder) EncodeIntSlice(arr []int)
- func (e *FastEncoder) EncodeMap(m map[string]any) error
- func (e *FastEncoder) EncodeMapStringFloat64(m map[string]float64) error
- func (e *FastEncoder) EncodeMapStringInt(m map[string]int) error
- func (e *FastEncoder) EncodeMapStringInt64(m map[string]int64) error
- func (e *FastEncoder) EncodeMapStringString(m map[string]string) error
- func (e *FastEncoder) EncodeString(s string)
- func (e *FastEncoder) EncodeStringSlice(arr []string)
- func (e *FastEncoder) EncodeTime(t time.Time)
- func (e *FastEncoder) EncodeUint(n uint64)
- func (e *FastEncoder) EncodeUint64Slice(arr []uint64)
- func (e *FastEncoder) EncodeValue(v any) error
- func (e *FastEncoder) Reset()
- type HealthChecker
- type HealthCheckerConfig
- type HealthStatus
- type InternStats
- type KeyIntern
- type KeyInternStats
- type MergeMode
- type Metrics
- type MetricsCollector
- func (mc *MetricsCollector) EndConcurrentOperation()
- func (mc *MetricsCollector) GetMetrics() Metrics
- func (mc *MetricsCollector) GetSummary() string
- func (mc *MetricsCollector) RecordCacheHit()
- func (mc *MetricsCollector) RecordCacheMiss()
- func (mc *MetricsCollector) RecordError(errorType string)
- func (mc *MetricsCollector) RecordOperation(duration time.Duration, success bool, memoryUsed int64)
- func (mc *MetricsCollector) Reset()
- func (mc *MetricsCollector) StartConcurrentOperation()
- type ParallelConfig
- type ParallelMapResult
- type ParallelProcessor
- func (pp *ParallelProcessor) ParallelFilter(arr []any, predicate func(value any) bool) []any
- func (pp *ParallelProcessor) ParallelForEach(arr []any, fn func(index int, value any) error) error
- func (pp *ParallelProcessor) ParallelForEachMap(m map[string]any, fn func(key string, value any) error) error
- func (pp *ParallelProcessor) ParallelMap(m map[string]any, fn func(key string, value any) (any, error)) (map[string]any, error)
- func (pp *ParallelProcessor) ParallelSlice(arr []any, fn func(index int, value any) (any, error)) ([]any, error)
- func (pp *ParallelProcessor) ParallelTransform(arr []any, transform func(value any) any) []any
- type ParallelSliceResult
- type PathIntern
- type PathSegment
- func NewArrayIndexSegment(index int) PathSegment
- func NewArraySliceSegment(start, end, step int, hasStart, hasEnd, hasStep bool) PathSegment
- func NewExtractSegment(extract string) PathSegment
- func NewExtractSegmentWithFlat(key string, flat bool) PathSegment
- func NewPropertySegment(key string) PathSegment
- func NewRecursiveSegment() PathSegment
- func NewWildcardSegment() PathSegment
- func ParseArraySegment(part string, segments []PathSegment) []PathSegment
- func ParseComplexSegment(part string) ([]PathSegment, error)
- func ParseExtractionSegment(part string, segments []PathSegment) []PathSegment
- func ParsePath(path string) ([]PathSegment, error)
- func ParsePathSegment(part string, segments []PathSegment) []PathSegment
- func SplitPathIntoSegments(path string, segments []PathSegment) []PathSegment
- func (ps PathSegment) GetArrayIndex(arrayLength int) (int, error)
- func (ps *PathSegment) GetEnd() (int, bool)
- func (ps *PathSegment) GetStart() (int, bool)
- func (ps *PathSegment) GetStep() (int, bool)
- func (ps *PathSegment) HasEnd() bool
- func (ps *PathSegment) HasStart() bool
- func (ps *PathSegment) HasStep() bool
- func (ps PathSegment) IsArrayAccess() bool
- func (ps *PathSegment) IsFlatExtract() bool
- func (ps *PathSegment) IsNegativeIndex() bool
- func (ps *PathSegment) IsWildcardSegment() bool
- func (ps PathSegment) String() string
- func (ps PathSegment) TypeString() string
- type PathSegmentFlags
- type PathSegmentType
- type StringIntern
- type StructFieldInfo
- type WorkerPool
Constants ¶
const ( // DefaultMaxCacheMemory is the default maximum memory for cache (256MB) // This is more conservative than 2GB to work well on all systems including containers DefaultMaxCacheMemory = 256 * 1024 * 1024 // CacheHighWatermarkPercent is the percentage of max memory at which proactive eviction begins CacheHighWatermarkPercent = 80 )
Cache memory limits - configurable based on system resources
const ( // Depth limits for various operations MaxDeepMergeDepth = 100 // Maximum depth for deep merge operations MaxPathParseDepth = 100 // Maximum depth for path parsing MaxNestingDepth = 200 // Maximum JSON nesting depth for security validation MaxSensitiveDataDepth = 10 // Maximum depth for sensitive data detection // Path and cache limits MaxPathLength = 5000 // Maximum path length for security (single source of truth) MaxCacheKeyLength = 1024 // Maximum cache key length to prevent memory issues // Array index sentinel values // These values are distinct from valid array indices to avoid confusion // ArrayIndexInvalid is returned when the index cannot be determined ArrayIndexInvalid = -999999 // Kept for backward compatibility )
const ( // FNV-1a algorithm constants - single source of truth FNVOffsetBasis uint64 = 14695981039346656037 FNVPrime uint64 = 1099511628211 // LargeStringHashThreshold is the size threshold for using sampling-based hash. // Strings larger than this use HashStringFNV1aSampled for better performance. LargeStringHashThreshold = 4096 )
const ( PathFlagNegative = FlagIsNegative // Indicates negative array index PathFlagWildcard = FlagIsWildcard // Indicates wildcard segment PathFlagFlat = FlagIsFlat // Indicates flat extraction mode PathFlagHasStart = FlagHasStart // Indicates slice has start value PathFlagHasEnd = FlagHasEnd // Indicates slice has end value PathFlagHasStep = FlagHasStep // Indicates slice has step value )
Public API aliases for flag constants (backward compatibility). These provide more descriptive names for the public API.
Variables ¶
var ( // ErrPathNotFound indicates the requested path does not exist ErrPathNotFound = errors.New("path not found") // ErrTypeMismatch indicates a type mismatch during path navigation ErrTypeMismatch = errors.New("type mismatch") // ErrInvalidPath indicates an invalid path format ErrInvalidPath = errors.New("invalid path") )
var DefaultParallelProcessor = NewParallelProcessor(DefaultParallelConfig())
DefaultParallelProcessor is the default parallel processor
var FastBufferPool = sync.Pool{ New: func() any { return bytes.NewBuffer(make([]byte, 0, 512)) }, }
FastBufferPool is a pool of byte buffers for fast encoding
var GlobalKeyIntern = NewKeyIntern()
GlobalKeyIntern is the global key interner
var GlobalPathIntern = NewPathIntern(50000)
GlobalPathIntern is the global path interner
var GlobalStringIntern = NewStringIntern(10 * 1024 * 1024) // 10MB max
GlobalStringIntern is the default string interner
Functions ¶
func ArrayItemKey ¶ added in v1.2.0
ArrayItemKey generates a unique key for array item deduplication
func BatchIntern ¶ added in v1.2.0
BatchIntern interns multiple strings at once. More efficient than calling Intern multiple times due to reduced lock overhead. SECURITY FIX: Added memory-based eviction to prevent unbounded growth.
func BatchInternKeys ¶ added in v1.2.0
BatchInternKeys interns multiple keys at once using the key interner
func ChunkArrayOptimized ¶ added in v1.2.0
ChunkArrayOptimized splits array into chunks
func CleanupNullValues ¶ added in v1.3.0
CleanupNullValues recursively removes null values and empty containers from JSON data. When compactArrays is true, null elements are also removed from arrays. PERFORMANCE: Pre-allocates result containers with capacity hints
func CompactArrayOptimized ¶ added in v1.2.0
CompactArrayOptimized removes null and empty values from array with pooling
func ContainsAnyByte ¶ added in v1.2.0
ContainsAnyByte checks if string contains any of the specified bytes This is faster than strings.ContainsAny for single-byte character sets
func ConvertNumbersToFloat ¶ added in v1.3.0
ConvertNumbersToFloat recursively converts json.Number and Number types to float64. This is needed because standard json.Marshal encodes json.Number as strings. PERFORMANCE: Pre-allocates result containers with capacity hints.
func CreatePathError ¶ added in v1.2.0
CreatePathError creates a path-specific error
func DeepMerge ¶ added in v1.2.0
DeepMerge recursively merges two JSON values using union merge strategy (default) This is kept for backward compatibility - it delegates to DeepMergeWithMode
func DeepMergeWithMode ¶ added in v1.3.0
DeepMergeWithMode recursively merges two JSON values with specified mode
func EncodeFast ¶ added in v1.2.0
EncodeFast attempts to encode a primitive value directly to a buffer. PERFORMANCE: Inline encoding for primitives avoids reflection and allocations. Returns true if the value was encoded, false if it needs standard encoding.
func EscapeJSONPointer ¶ added in v1.1.0
EscapeJSONPointer escapes special characters for JSON Pointer Uses single-pass algorithm to avoid multiple allocations
func FastMarshal ¶ added in v1.2.0
FastMarshal marshals a value to JSON using the fast encoder
func FastMarshalToString ¶ added in v1.2.0
FastMarshalToString marshals a value to a JSON string
func FastParseFloat ¶ added in v1.2.0
FastParseFloat parses a float from a byte slice SECURITY: Rejects NaN and Infinity values which are invalid in standard JSON (RFC 8259)
func FastParseInt ¶ added in v1.2.0
FastParseInt parses an integer from a byte slice PERFORMANCE: Avoids string allocation by parsing directly from bytes
func FilterArrayOptimized ¶ added in v1.2.0
FilterArrayOptimized filters array with a predicate function using pooling
func FlattenArrayOptimized ¶ added in v1.2.0
FlattenArrayOptimized flattens nested arrays with pooling
func FormatNumberForDedup ¶ added in v1.2.0
FormatNumberForDedup formats a number for deduplication key generation. Handles edge cases: NaN, Inf, and values outside int64 range.
func GetByteSlice ¶ added in v1.2.0
func GetByteSlice() *[]byte
GetByteSlice gets a byte slice from the pool PERFORMANCE: Reusable byte slices for encoding operations
func GetEncoderBuffer ¶ added in v1.2.0
GetEncoderBuffer gets a buffer from the pool
func GetErrorSlice ¶ added in v1.3.0
func GetErrorSlice() *[]error
GetErrorSlice retrieves a pooled []error slice
func GetFastBuffer ¶ added in v1.2.0
GetFastBuffer gets a buffer from the pool
func GetFlattenedSlice ¶ added in v1.3.0
func GetFlattenedSlice() *[]any
GetFlattenedSlice retrieves a pooled slice for flattening operations
func GetPathSegmentSlice ¶ added in v1.3.0
func GetPathSegmentSlice(hint int) *[]PathSegment
GetPathSegmentSlice retrieves a pooled []PathSegment slice
func GetPooledSlice ¶ added in v1.2.0
func GetPooledSlice() *[]any
GetPooledSlice gets a pooled slice for array operations
func GetResultsSlice ¶ added in v1.3.0
GetResultsSlice retrieves a pooled []any slice with appropriate capacity SECURITY FIX: For hints larger than pool capacity, allocate directly This prevents capacity mismatch and reduces resize operations
func GetSafeArrayElement ¶ added in v1.0.7
func GetStreamingMap ¶ added in v1.3.0
GetStreamingMap retrieves a pooled map[string]any
func GetStreamingSlice ¶ added in v1.3.0
GetStreamingSlice retrieves a pooled []any slice for streaming
func GetStringBuilder ¶ added in v1.3.0
GetStringBuilder retrieves a pooled strings.Builder
func HTMLEscape ¶ added in v1.3.0
HTMLEscape performs HTML escaping on JSON string. Compatible with encoding/json: escapes <, >, &, U+2028, U+2029.
This is the centralized implementation used by:
- json.HTMLEscape() for encoding/json compatibility
- json.HTMLEscapeBuffer() for buffer operations
- Any other internal components needing HTML escaping
PERFORMANCE v2: Uses pooled buffer and byte-level scanning for speed.
func HTMLEscapeTo ¶ added in v1.3.0
HTMLEscapeTo writes HTML-escaped JSON to the destination buffer. This is more efficient than HTMLEscape when writing to an existing buffer.
func HasComplexSegments ¶ added in v1.2.0
func HasComplexSegments(segments []PathSegment) bool
HasComplexSegments checks if any segment is complex (slice or extract)
func HashBool ¶ added in v1.3.0
HashBool mixes a bool value into the hash using FNV-1a algorithm. Both true and false produce distinct hash changes to prevent collisions.
func HashBytesFNV1a ¶ added in v1.3.0
HashBytesFNV1a computes FNV-1a hash for a byte slice. This is a fast, non-cryptographic hash function suitable for cache keys. PERFORMANCE v2: Uses deferred multiplication pattern for ~40% improvement.
func HashBytesFNV1aSecure ¶ added in v1.3.0
HashBytesFNV1aSecure computes FNV-1a hash with full scan for security-sensitive contexts. SECURITY: Always performs full byte slice scan to prevent collision attacks.
func HashInt64 ¶ added in v1.3.0
HashInt64 mixes an int64 value into the hash using FNV-1a algorithm.
func HashString ¶ added in v1.3.0
HashString mixes a string value into the hash using FNV-1a algorithm. The length is included to prevent collisions between short/long strings.
func HashStringFNV1a ¶ added in v1.3.0
HashStringFNV1a computes FNV-1a hash for a string (full scan). This is a fast, non-cryptographic hash function suitable for cache keys. PERFORMANCE v3: Optimized with small-string fast path and improved loop structure.
func HashStringFNV1aSampled ¶ added in v1.3.0
HashStringFNV1aSampled computes FNV-1a hash with sampling for large strings. PERFORMANCE: For large strings (>4KB), samples first/middle/last sections to avoid full scan overhead while maintaining good hash distribution. PERFORMANCE v4: Optimized with batch byte loading and reduced multiplications.
func HashStringFNV1aSecure ¶ added in v1.3.0
HashStringFNV1aSecure computes FNV-1a hash with full scan for security-sensitive contexts. SECURITY: Always performs full string scan to prevent collision attacks where an attacker crafts strings with identical sampled regions but different content. Use this for security-critical cache keys, validation caching, and any context where collision attacks are a concern. PERFORMANCE: ~30-40% slower than HashStringFNV1aSampled for large strings, but provides strong collision resistance guarantees.
func HashUint64 ¶ added in v1.3.0
HashUint64 mixes a uint64 value into the hash using FNV-1a algorithm. This is the core mixing function for building composite hashes.
func IndexIgnoreCase ¶ added in v1.2.0
IndexIgnoreCase finds a pattern in s case-insensitively without allocation. This is a shared utility function used by multiple packages for security pattern matching. PERFORMANCE v2: Optimized with reduced branching and batch processing.
func IntToStringFast ¶ added in v1.2.0
IntToStringFast converts an integer to string using pre-computed values PERFORMANCE: Avoids strconv.Itoa allocations for values 0-99
func InternKeyBytes ¶ added in v1.2.0
InternKeyBytes interns a JSON key from bytes
func InternString ¶ added in v1.2.0
InternString interns a string using the global string interner
func InternStringBytes ¶ added in v1.2.0
InternStringBytes interns a string from bytes
func IsArrayPath ¶ added in v1.2.0
IsArrayPath checks if a path contains array access
func IsArrayType ¶ added in v1.2.0
IsArrayType checks if data is an array type
func IsComplexPath ¶ added in v1.2.0
IsComplexPath checks if a path contains complex patterns Optimized: single scan instead of multiple Contains calls
func IsDotNotationPath ¶ added in v1.2.0
IsDotNotationPath checks if a path uses dot notation format
func IsExtractionPath ¶ added in v1.2.0
IsExtractionPath checks if a path contains extraction patterns that trigger multi-container (distributed) operations: }[, }:, }{, {flat:
func IsExtractionSegment ¶ added in v1.3.0
func IsExtractionSegment(segment PathSegment) bool
IsExtractionSegment checks if a segment triggers extraction operations
func IsJSONArray ¶ added in v1.2.0
IsJSONArray checks if data is a JSON array ([]any)
func IsJSONObject ¶ added in v1.2.0
IsJSONObject checks if data is a JSON object (map[string]any)
func IsJSONPointerPath ¶ added in v1.2.0
IsJSONPointerPath checks if a path uses JSON Pointer format
func IsJSONPrimitive ¶ added in v1.2.0
IsJSONPrimitive checks if data is a JSON primitive type
func IsMatchPatternIgnoreCase ¶ added in v1.2.0
IsMatchPatternIgnoreCase is the exported version for use by other packages
func IsNilOrEmpty ¶ added in v1.2.0
IsNilOrEmpty checks if a value is nil or empty
func IsObjectType ¶ added in v1.2.0
IsObjectType checks if data is an object type
func IsSlicePath ¶ added in v1.2.0
IsSlicePath checks if a path contains slice notation
func IsSliceType ¶ added in v1.3.0
IsSliceType checks if data is a slice type using reflection This handles any slice type, not just []any
func IsValidArrayIndex ¶ added in v1.2.0
IsValidArrayIndex checks if a string is a valid array index
func IsValidCacheKey ¶ added in v1.3.0
IsValidCacheKey validates that a cache key is valid for use. Returns false if the key is empty, too long, or contains control characters.
func IsValidIndex ¶ added in v1.0.7
func IsValidJSONNumber ¶ added in v1.2.0
IsValidJSONNumber validates if a string represents a valid JSON number format according to RFC 8259. Supports integers, decimals, and scientific notation.
func IsValidJSONPrimitive ¶ added in v1.2.0
IsValidJSONPrimitive checks if a string represents a valid JSON primitive (true, false, null, or number)
func IsValidNumberString ¶ added in v1.2.0
IsValidNumberString checks if a string represents a valid number
func IsValidPropertyName ¶ added in v1.2.0
IsValidPropertyName checks if a name is a valid property name
func IsValidSliceRange ¶ added in v1.2.0
IsValidSliceRange checks if a range string is a valid slice range
func IsValidUTF8 ¶ added in v1.2.0
IsValidUTF8 checks if a byte slice is valid UTF-8
func IsWordChar ¶ added in v1.2.0
IsWordChar returns true if the character is part of a word (alphanumeric or underscore)
func MapArrayOptimized ¶ added in v1.2.0
MapArrayOptimized transforms array elements using pooling
func MarshalJSON ¶ added in v1.2.0
MarshalJSON marshals a value to JSON string with optional pretty printing
func MergeObjects ¶ added in v1.2.0
MergeObjects merges two objects, with the second object taking precedence
func NeedsDotBefore ¶ added in v1.2.0
NeedsDotBefore determines if a dot should be inserted before a character
func NeedsDotBeforeByte ¶ added in v1.2.0
NeedsDotBeforeByte determines if a dot should be inserted before a character (byte version for ASCII fast path)
func NeedsHTMLEscape ¶ added in v1.3.0
NeedsHTMLEscape checks if a string needs HTML escaping. PERFORMANCE: Fast byte-level check to avoid allocation when no escaping is needed. ASCII-only check is safe because all HTML-escape characters (<, >, &) are single-byte ASCII.
func NeedsPathPreprocessing ¶ added in v1.2.0
NeedsPathPreprocessing checks if a path needs preprocessing before parsing
func NewPathError ¶ added in v1.2.0
NewPathError creates a new path error
func NormalizeIndex ¶ added in v1.0.7
func NormalizePathSeparators ¶ added in v1.2.0
NormalizePathSeparators removes duplicate dots and trims leading/trailing dots Optimized: single-pass construction using strings.Builder
func NormalizeSlice ¶ added in v1.0.7
func ParseAndValidateArrayIndex ¶ added in v1.1.0
ParseAndValidateArrayIndex parses a string as an array index and validates it against array length Returns the index and true if successful, 0 and false otherwise
func ParseArrayIndex ¶ added in v1.0.7
func ParseIntFast ¶ added in v1.2.0
ParseIntFast parses a string as an integer without using strconv. PERFORMANCE: Avoids strconv.Atoi allocation for common cases. SECURITY: Proper overflow detection for both 32-bit and 64-bit systems. Returns (value, true) if successful, (0, false) otherwise.
func ParseSliceComponents ¶ added in v1.0.7
func PerformArraySlice ¶ added in v1.0.7
PerformArraySlice performs Python-style array slicing with optimized capacity calculation
func PreprocessPath ¶ added in v1.2.0
PreprocessPath adds dots before brackets/braces where needed
func PutByteSlice ¶ added in v1.2.0
func PutByteSlice(b *[]byte)
PutByteSlice returns a byte slice to the pool
func PutByteSliceSecure ¶ added in v1.3.0
func PutByteSliceSecure(b *[]byte)
PutByteSliceSecure returns a byte slice to the pool after clearing sensitive data SECURITY: Use this when the slice may have contained sensitive information PERFORMANCE: Slightly slower than PutByteSlice due to zeroing operation
func PutEncoder ¶ added in v1.2.0
func PutEncoder(e *FastEncoder)
PutEncoder returns an encoder to the appropriate pool PERFORMANCE: Use tiered pools - buffers > 64KB are discarded to prevent memory bloat
func PutEncoderBuffer ¶ added in v1.2.0
PutEncoderBuffer returns a buffer to the pool
func PutEncoderBufferSecure ¶ added in v1.3.0
PutEncoderBufferSecure returns a buffer to the pool after clearing sensitive data SECURITY: Use this when the buffer may have contained sensitive information PERFORMANCE: Slightly slower than PutEncoderBuffer due to zeroing operation
func PutErrorSlice ¶ added in v1.3.0
func PutErrorSlice(s *[]error)
PutErrorSlice returns a []error slice to the pool
func PutFastBuffer ¶ added in v1.2.0
PutFastBuffer returns a buffer to the pool
func PutFlattenedSlice ¶ added in v1.3.0
func PutFlattenedSlice(s *[]any)
PutFlattenedSlice returns a slice used for flattening
func PutPathSegmentSlice ¶ added in v1.3.0
func PutPathSegmentSlice(s *[]PathSegment)
PutPathSegmentSlice returns a []PathSegment slice to the pool
func PutPooledSlice ¶ added in v1.2.0
func PutPooledSlice(s *[]any)
PutPooledSlice returns a slice to the pool
func PutResultsSlice ¶ added in v1.3.0
func PutResultsSlice(s *[]any)
PutResultsSlice returns a []any slice to the appropriate pool
func PutStreamingMap ¶ added in v1.3.0
PutStreamingMap returns a map[string]any to the pool. Note: Uses len() as an approximation since maps don't have capacity.
func PutStreamingSlice ¶ added in v1.3.0
func PutStreamingSlice(s *[]any)
PutStreamingSlice returns a []any slice to the streaming pool
func PutStringBuilder ¶ added in v1.3.0
PutStringBuilder returns a strings.Builder to the pool
func ReconstructPath ¶ added in v1.2.0
func ReconstructPath(segments []PathSegment) string
ReconstructPath reconstructs a path string from segments
func ReverseArrayOptimized ¶ added in v1.2.0
func ReverseArrayOptimized(arr []any)
ReverseArrayOptimized reverses array in place
func StringToBytes ¶ added in v1.2.0
StringToBytes converts string to []byte Using standard conversion for safety and compatibility
func TakeFirst ¶ added in v1.2.0
TakeFirst returns first n elements as a new slice. The returned slice is independent of the input; modifications do not affect the original.
func TakeLast ¶ added in v1.2.0
TakeLast returns last n elements as a new slice. The returned slice is independent of the input; modifications do not affect the original.
func TryConvertToArray ¶ added in v1.2.0
TryConvertToArray attempts to convert a map to an array if it has numeric keys
func UnescapeJSONPointer ¶ added in v1.1.0
UnescapeJSONPointer unescapes JSON Pointer special characters Uses single-pass algorithm to avoid multiple allocations
func UniqueArrayOptimized ¶ added in v1.2.0
UniqueArrayOptimized removes duplicates from array using map for O(n) lookup. Always returns a new slice; the input is never modified or returned directly.
func ValidatePath ¶ added in v1.0.7
ValidatePath validates a path string for syntax correctness. FOCUSED: Only validates syntax (brackets, depth, array indices). SECURITY: Security validation (injection, traversal) should be done by caller. PERFORMANCE: Uses single-pass validation with fast path for simple paths.
Types ¶
type CacheConfig ¶ added in v1.0.10
type CacheConfig interface {
IsCacheEnabled() bool
GetMaxCacheSize() int
GetCacheTTL() time.Duration
}
CacheConfig provides the configuration needed by CacheManager This minimal interface avoids circular dependencies with the main json package
type CacheManager ¶
type CacheManager struct {
// contains filtered or unexported fields
}
CacheManager handles all caching operations with performance and memory management
func NewCacheManager ¶
func NewCacheManager(config CacheConfig) *CacheManager
NewCacheManager creates a new cache manager with sharding
func (*CacheManager) CleanExpiredCache ¶
func (cm *CacheManager) CleanExpiredCache()
CleanExpiredCache removes expired entries from all shards (with goroutine limit)
func (*CacheManager) Clear ¶ added in v1.0.6
func (cm *CacheManager) Clear()
Clear removes all entries from the cache
func (*CacheManager) Close ¶ added in v1.2.2
func (cm *CacheManager) Close()
Close gracefully shuts down the cache manager, waiting for cleanup goroutines to complete
func (*CacheManager) Delete ¶ added in v1.0.6
func (cm *CacheManager) Delete(key string)
Delete removes a value from the cache
func (*CacheManager) Get ¶
func (cm *CacheManager) Get(key string) (any, bool)
Get retrieves a value from cache with O(1) complexity PERFORMANCE: Optimized to minimize lock contention - Uses RLock for the common fast path - Only upgrades to Lock when TTL expiration needs cleanup - LRU position update is deferred to reduce write lock frequency
func (*CacheManager) GetStats ¶
func (cm *CacheManager) GetStats() CacheStats
GetStats returns cache statistics
func (*CacheManager) Set ¶
func (cm *CacheManager) Set(key string, value any)
Set stores a value in the cache
type CacheStats ¶
type CacheStats struct {
Entries int64
TotalMemory int64
HitCount int64
MissCount int64
HitRatio float64
MemoryEfficiency float64
Evictions int64
ShardCount int
}
CacheStats represents cache statistics
type CheckResult ¶
CheckResult represents the result of a single health check
type ChunkProcessor ¶ added in v1.2.0
type ChunkProcessor struct {
// contains filtered or unexported fields
}
ChunkProcessor processes data in chunks for memory efficiency
func NewChunkProcessor ¶ added in v1.2.0
func NewChunkProcessor(chunkSize int) *ChunkProcessor
NewChunkProcessor creates a new chunk processor
func (*ChunkProcessor) ProcessMap ¶ added in v1.2.0
ProcessMap processes a map in chunks
func (*ChunkProcessor) ProcessSlice ¶ added in v1.2.0
func (cp *ChunkProcessor) ProcessSlice(arr []any, fn func(chunk []any) error) error
ProcessSlice processes a slice in chunks
type CompiledPath ¶ added in v1.2.0
type CompiledPath struct {
// contains filtered or unexported fields
}
CompiledPath represents a pre-parsed JSON path ready for fast operations
func CompilePath ¶ added in v1.2.0
func CompilePath(path string) (*CompiledPath, error)
CompilePath parses and compiles a JSON path string into a CompiledPath. The returned CompiledPath can be reused for multiple operations.
func CompilePathUnsafe ¶ added in v1.2.0
func CompilePathUnsafe(path string) (*CompiledPath, error)
CompilePathUnsafe compiles a path without validation. Use only when the path is known to be safe.
func (*CompiledPath) Exists ¶ added in v1.2.0
func (cp *CompiledPath) Exists(data any) bool
Exists checks if a value exists at the compiled path
func (*CompiledPath) Get ¶ added in v1.2.0
func (cp *CompiledPath) Get(data any) (any, error)
Get retrieves a value from parsed JSON data using the compiled path
func (*CompiledPath) GetFromRaw ¶ added in v1.2.0
func (cp *CompiledPath) GetFromRaw(raw []byte) (any, error)
GetFromRaw retrieves a value from raw JSON bytes using the compiled path
func (*CompiledPath) Hash ¶ added in v1.2.0
func (cp *CompiledPath) Hash() uint64
Hash returns the pre-computed hash of the path
func (*CompiledPath) IsEmpty ¶ added in v1.2.0
func (cp *CompiledPath) IsEmpty() bool
IsEmpty returns true if the path has no segments
func (*CompiledPath) Len ¶ added in v1.2.0
func (cp *CompiledPath) Len() int
Len returns the number of segments in the path
func (*CompiledPath) Path ¶ added in v1.2.0
func (cp *CompiledPath) Path() string
Path returns the original path string
func (*CompiledPath) Release ¶ added in v1.2.0
func (cp *CompiledPath) Release()
Release returns the CompiledPath to the pool. Do not use the CompiledPath after calling Release.
func (*CompiledPath) Segments ¶ added in v1.2.0
func (cp *CompiledPath) Segments() []PathSegment
Segments returns the parsed path segments
func (*CompiledPath) String ¶ added in v1.2.0
func (cp *CompiledPath) String() string
String returns the path string representation
type CompiledPathCache ¶ added in v1.2.0
type CompiledPathCache struct {
// contains filtered or unexported fields
}
CompiledPathCache caches compiled paths for reuse
func GetGlobalCompiledPathCache ¶ added in v1.2.0
func GetGlobalCompiledPathCache() *CompiledPathCache
GetGlobalCompiledPathCache returns the global compiled path cache
func NewCompiledPathCache ¶ added in v1.2.0
func NewCompiledPathCache(max int) *CompiledPathCache
NewCompiledPathCache creates a new compiled path cache
func (*CompiledPathCache) Clear ¶ added in v1.2.0
func (c *CompiledPathCache) Clear()
Clear clears the cache
func (*CompiledPathCache) Get ¶ added in v1.2.0
func (c *CompiledPathCache) Get(path string) (*CompiledPath, error)
Get retrieves a compiled path from the cache, compiling it if not found. The returned *CompiledPath is an independent copy; callers must call Release() when done to return it to the pool. Eviction of a cached entry does not affect previously returned copies.
func (*CompiledPathCache) Size ¶ added in v1.2.0
func (c *CompiledPathCache) Size() int
Size returns the number of cached paths
type CompiledPathError ¶ added in v1.2.0
CompiledPathError represents an error during compiled path operations
func (*CompiledPathError) Error ¶ added in v1.2.0
func (e *CompiledPathError) Error() string
Error implements the error interface
func (*CompiledPathError) Unwrap ¶ added in v1.2.0
func (e *CompiledPathError) Unwrap() error
Unwrap returns the underlying error
type ExtractionGroup ¶ added in v1.2.0
type ExtractionGroup struct {
Segments []PathSegment
}
ExtractionGroup represents a group of consecutive extraction segments used for processing complex extraction patterns in JSON paths.
func DetectConsecutiveExtractions ¶ added in v1.2.0
func DetectConsecutiveExtractions(segments []PathSegment) []ExtractionGroup
DetectConsecutiveExtractions identifies groups of consecutive extraction segments. This is useful for processing complex extraction patterns where multiple extractions need to be processed together.
type FastEncoder ¶ added in v1.2.0
type FastEncoder struct {
// contains filtered or unexported fields
}
FastEncoder provides fast JSON encoding without reflection for common types
func GetEncoder ¶ added in v1.2.0
func GetEncoder() *FastEncoder
GetEncoder retrieves an encoder from the pool
func GetEncoderWithSize ¶ added in v1.2.0
func GetEncoderWithSize(hint int) *FastEncoder
GetEncoderWithSize retrieves an encoder with an appropriate capacity hint. PERFORMANCE: Uses tiered pools for better memory management and reduced allocations.
func (*FastEncoder) Bytes ¶ added in v1.2.0
func (e *FastEncoder) Bytes() []byte
Bytes returns the encoded bytes
func (*FastEncoder) EncodeArray ¶ added in v1.2.0
func (e *FastEncoder) EncodeArray(arr []any) error
EncodeArray encodes a []any
func (*FastEncoder) EncodeBase64 ¶ added in v1.2.0
func (e *FastEncoder) EncodeBase64(b []byte)
EncodeBase64 encodes a []byte as base64 string
func (*FastEncoder) EncodeBool ¶ added in v1.2.0
func (e *FastEncoder) EncodeBool(b bool)
EncodeBool encodes a boolean
func (*FastEncoder) EncodeFloat ¶ added in v1.2.0
func (e *FastEncoder) EncodeFloat(n float64, bits int)
EncodeFloat encodes a floating point number PERFORMANCE: Uses pre-computed common values and fast integer conversion SECURITY: Special values (NaN, Inf) are encoded as null for JSON compatibility
func (*FastEncoder) EncodeFloat32Slice ¶ added in v1.2.0
func (e *FastEncoder) EncodeFloat32Slice(arr []float32)
EncodeFloat32Slice encodes a []float32 PERFORMANCE: Specialized encoder avoids interface conversion overhead
func (*FastEncoder) EncodeFloatSlice ¶ added in v1.2.0
func (e *FastEncoder) EncodeFloatSlice(arr []float64)
EncodeFloatSlice encodes a []float64
func (*FastEncoder) EncodeInt ¶ added in v1.2.0
func (e *FastEncoder) EncodeInt(n int64)
EncodeInt encodes an integer PERFORMANCE: Uses pre-computed lookup tables for integers -999 to 9999
func (*FastEncoder) EncodeInt32Slice ¶ added in v1.2.0
func (e *FastEncoder) EncodeInt32Slice(arr []int32)
EncodeInt32Slice encodes a []int32 PERFORMANCE: Specialized encoder avoids interface conversion overhead
func (*FastEncoder) EncodeInt64Slice ¶ added in v1.2.0
func (e *FastEncoder) EncodeInt64Slice(arr []int64)
EncodeInt64Slice encodes a []int64
func (*FastEncoder) EncodeIntSlice ¶ added in v1.2.0
func (e *FastEncoder) EncodeIntSlice(arr []int)
EncodeIntSlice encodes a []int
func (*FastEncoder) EncodeMap ¶ added in v1.2.0
func (e *FastEncoder) EncodeMap(m map[string]any) error
EncodeMap encodes a map[string]any
func (*FastEncoder) EncodeMapStringFloat64 ¶ added in v1.2.0
func (e *FastEncoder) EncodeMapStringFloat64(m map[string]float64) error
EncodeMapStringFloat64 encodes a map[string]float64
func (*FastEncoder) EncodeMapStringInt ¶ added in v1.2.0
func (e *FastEncoder) EncodeMapStringInt(m map[string]int) error
EncodeMapStringInt encodes a map[string]int
func (*FastEncoder) EncodeMapStringInt64 ¶ added in v1.2.0
func (e *FastEncoder) EncodeMapStringInt64(m map[string]int64) error
EncodeMapStringInt64 encodes a map[string]int64
func (*FastEncoder) EncodeMapStringString ¶ added in v1.2.0
func (e *FastEncoder) EncodeMapStringString(m map[string]string) error
EncodeMapStringString encodes a map[string]string
func (*FastEncoder) EncodeString ¶ added in v1.2.0
func (e *FastEncoder) EncodeString(s string)
EncodeString encodes a JSON string PERFORMANCE: Avoids reflection, uses inline escaping SECURITY: Validates UTF-8 encoding per RFC 8259
func (*FastEncoder) EncodeStringSlice ¶ added in v1.2.0
func (e *FastEncoder) EncodeStringSlice(arr []string)
EncodeStringSlice encodes a []string
func (*FastEncoder) EncodeTime ¶ added in v1.2.0
func (e *FastEncoder) EncodeTime(t time.Time)
EncodeTime encodes a time.Time in RFC3339 format
func (*FastEncoder) EncodeUint ¶ added in v1.2.0
func (e *FastEncoder) EncodeUint(n uint64)
EncodeUint encodes an unsigned integer PERFORMANCE: Uses pre-computed lookup tables for integers 0-9999
func (*FastEncoder) EncodeUint64Slice ¶ added in v1.2.0
func (e *FastEncoder) EncodeUint64Slice(arr []uint64)
EncodeUint64Slice encodes a []uint64
func (*FastEncoder) EncodeValue ¶ added in v1.2.0
func (e *FastEncoder) EncodeValue(v any) error
EncodeValue encodes any value to JSON Uses fast paths for common types, falls back to stdlib for complex types
func (*FastEncoder) Reset ¶ added in v1.2.0
func (e *FastEncoder) Reset()
Reset clears the encoder buffer
type HealthChecker ¶
type HealthChecker struct {
// contains filtered or unexported fields
}
HealthChecker provides health checking functionality for the JSON processor
func NewHealthChecker ¶
func NewHealthChecker(metrics *MetricsCollector, config *HealthCheckerConfig) *HealthChecker
NewHealthChecker creates a new health checker with optional custom thresholds
func (*HealthChecker) CheckHealth ¶
func (hc *HealthChecker) CheckHealth() HealthStatus
CheckHealth performs health checks and returns overall status
type HealthCheckerConfig ¶ added in v1.0.7
HealthCheckerConfig holds configuration for health checker
type HealthStatus ¶
type HealthStatus struct {
Timestamp time.Time `json:"timestamp"`
Healthy bool `json:"healthy"`
Checks map[string]CheckResult `json:"checks"`
}
HealthStatus represents the health status of the processor
func (*HealthStatus) GetFailedChecks ¶
func (hs *HealthStatus) GetFailedChecks() []string
GetFailedChecks returns a list of failed health check names
func (*HealthStatus) GetSummary ¶
func (hs *HealthStatus) GetSummary() string
GetSummary returns a formatted summary of the health status
type InternStats ¶ added in v1.2.0
InternStats holds statistics about the string interner.
type KeyIntern ¶ added in v1.2.0
type KeyIntern struct {
// contains filtered or unexported fields
}
KeyIntern is a specialized interner for JSON keys Uses sharding for better concurrent performance with hot key cache
func NewKeyIntern ¶ added in v1.2.0
func NewKeyIntern() *KeyIntern
NewKeyIntern creates a new sharded key interner with 64 shards
func (*KeyIntern) Clear ¶ added in v1.2.0
func (ki *KeyIntern) Clear()
Clear removes all interned keys
func (*KeyIntern) GetStats ¶ added in v1.2.0
func (ki *KeyIntern) GetStats() KeyInternStats
GetStats returns current statistics
func (*KeyIntern) Intern ¶ added in v1.2.0
Intern returns an interned version of the key PERFORMANCE: First checks hot key cache (lock-free), then falls back to sharded lookup SECURITY FIX: Added memory-based eviction and hot key cache size limit
func (*KeyIntern) InternBytes ¶ added in v1.2.0
InternBytes returns an interned string from a byte slice SECURITY: Uses safe conversion to avoid potential race conditions with pooled buffers
type KeyInternStats ¶ added in v1.2.0
KeyInternStats holds statistics about the key interner.
type MergeMode ¶ added in v1.3.0
type MergeMode int
MergeMode defines the merge strategy for combining JSON objects and arrays
type Metrics ¶
type Metrics struct {
// Operation metrics
TotalOperations int64 `json:"total_operations"`
SuccessfulOps int64 `json:"successful_ops"`
FailedOps int64 `json:"failed_ops"`
CacheHits int64 `json:"cache_hits"`
CacheMisses int64 `json:"cache_misses"`
// Performance metrics
TotalProcessingTime time.Duration `json:"total_processing_time"`
AvgProcessingTime time.Duration `json:"avg_processing_time"`
MaxProcessingTime time.Duration `json:"max_processing_time"`
MinProcessingTime time.Duration `json:"min_processing_time"`
// Memory metrics
TotalMemoryAllocated int64 `json:"total_memory_allocated"`
PeakMemoryUsage int64 `json:"peak_memory_usage"`
CurrentMemoryUsage int64 `json:"current_memory_usage"`
// Concurrency metrics
ActiveConcurrentOps int64 `json:"active_concurrent_ops"`
MaxConcurrentOps int64 `json:"max_concurrent_ops"`
// Runtime metrics
RuntimeMemStats runtime.MemStats `json:"runtime_mem_stats"`
Uptime time.Duration `json:"uptime"`
ErrorsByType map[string]int64 `json:"errors_by_type"`
}
Metrics represents collected performance metrics
type MetricsCollector ¶
type MetricsCollector struct {
// contains filtered or unexported fields
}
MetricsCollector collects and provides performance metrics for the JSON processor
func NewMetricsCollector ¶
func NewMetricsCollector() *MetricsCollector
NewMetricsCollector creates a new metrics collector
func (*MetricsCollector) EndConcurrentOperation ¶
func (mc *MetricsCollector) EndConcurrentOperation()
EndConcurrentOperation records the end of a concurrent operation
func (*MetricsCollector) GetMetrics ¶
func (mc *MetricsCollector) GetMetrics() Metrics
GetMetrics returns current metrics with runtime stats
func (*MetricsCollector) GetSummary ¶
func (mc *MetricsCollector) GetSummary() string
GetSummary returns a formatted summary of metrics
func (*MetricsCollector) RecordCacheHit ¶
func (mc *MetricsCollector) RecordCacheHit()
RecordCacheHit records a cache hit
func (*MetricsCollector) RecordCacheMiss ¶
func (mc *MetricsCollector) RecordCacheMiss()
RecordCacheMiss records a cache miss
func (*MetricsCollector) RecordError ¶
func (mc *MetricsCollector) RecordError(errorType string)
RecordError records an error by type
func (*MetricsCollector) RecordOperation ¶
func (mc *MetricsCollector) RecordOperation(duration time.Duration, success bool, memoryUsed int64)
RecordOperation records a completed operation
func (*MetricsCollector) StartConcurrentOperation ¶
func (mc *MetricsCollector) StartConcurrentOperation()
StartConcurrentOperation records the start of a concurrent operation
type ParallelConfig ¶ added in v1.2.0
type ParallelConfig struct {
Workers int // Number of worker goroutines
BatchSize int // Items per batch
MinParallel int // Minimum items to trigger parallel processing
MaxWorkers int // Maximum number of workers (0 = no limit, default 64)
}
ParallelConfig holds configuration for parallel operations
func DefaultParallelConfig ¶ added in v1.2.0
func DefaultParallelConfig() ParallelConfig
DefaultParallelConfig returns the default parallel configuration
type ParallelMapResult ¶ added in v1.2.0
ParallelMapResult represents a result from parallel map processing
type ParallelProcessor ¶ added in v1.2.0
type ParallelProcessor struct {
// contains filtered or unexported fields
}
ParallelProcessor handles parallel batch operations
func NewParallelProcessor ¶ added in v1.2.0
func NewParallelProcessor(config ParallelConfig) *ParallelProcessor
NewParallelProcessor creates a new parallel processor
func (*ParallelProcessor) ParallelFilter ¶ added in v1.2.0
func (pp *ParallelProcessor) ParallelFilter(arr []any, predicate func(value any) bool) []any
ParallelFilter filters slice elements in parallel
func (*ParallelProcessor) ParallelForEach ¶ added in v1.2.0
ParallelForEach iterates over slice elements in parallel. The function is called concurrently; ensure thread safety.
func (*ParallelProcessor) ParallelForEachMap ¶ added in v1.2.0
func (pp *ParallelProcessor) ParallelForEachMap(m map[string]any, fn func(key string, value any) error) error
ParallelForEachMap iterates over map entries in parallel
func (*ParallelProcessor) ParallelMap ¶ added in v1.2.0
func (pp *ParallelProcessor) ParallelMap(m map[string]any, fn func(key string, value any) (any, error)) (map[string]any, error)
ParallelMap processes map entries in parallel
func (*ParallelProcessor) ParallelSlice ¶ added in v1.2.0
func (pp *ParallelProcessor) ParallelSlice(arr []any, fn func(index int, value any) (any, error)) ([]any, error)
ParallelSlice processes slice elements in parallel
func (*ParallelProcessor) ParallelTransform ¶ added in v1.2.0
func (pp *ParallelProcessor) ParallelTransform(arr []any, transform func(value any) any) []any
ParallelTransform transforms slice elements in parallel
type ParallelSliceResult ¶ added in v1.2.0
ParallelSliceResult represents a result from parallel slice processing
type PathIntern ¶ added in v1.2.0
type PathIntern struct {
// contains filtered or unexported fields
}
PathIntern caches parsed path segments with their string representations SECURITY FIX: Added memory-based eviction to prevent unbounded growth
func NewPathIntern ¶ added in v1.2.0
func NewPathIntern(maxSize int) *PathIntern
NewPathIntern creates a new path interner
func (*PathIntern) Clear ¶ added in v1.2.0
func (pi *PathIntern) Clear()
Clear removes all cached paths
func (*PathIntern) Get ¶ added in v1.2.0
func (pi *PathIntern) Get(path string) ([]PathSegment, bool)
Get retrieves cached path segments
func (*PathIntern) Set ¶ added in v1.2.0
func (pi *PathIntern) Set(path string, segments []PathSegment)
Set stores path segments in cache SECURITY FIX: Added memory-based eviction at 80% watermark
type PathSegment ¶
type PathSegment struct {
Type PathSegmentType
Key string // Used for PropertySegment and ExtractSegment
Index int // Used for ArrayIndexSegment and slice start
End int // Direct value (was *int) for ArraySliceSegment
Step int // Direct value (was *int) for ArraySliceSegment
Flags PathSegmentFlags // Bit-packed flags
}
PathSegment represents a single segment in a JSON path Optimized to avoid pointer allocations by using direct values and bit flags
func NewArrayIndexSegment ¶
func NewArrayIndexSegment(index int) PathSegment
NewArrayIndexSegment creates an array index access segment
func NewArraySliceSegment ¶
func NewArraySliceSegment(start, end, step int, hasStart, hasEnd, hasStep bool) PathSegment
NewArraySliceSegment creates an array slice access segment Now accepts direct values instead of pointers to avoid heap allocations
func NewExtractSegment ¶
func NewExtractSegment(extract string) PathSegment
NewExtractSegment creates an extraction segment
func NewExtractSegmentWithFlat ¶ added in v1.3.0
func NewExtractSegmentWithFlat(key string, flat bool) PathSegment
NewExtractSegmentWithFlat creates an extraction segment with explicit flat flag
func NewPropertySegment ¶
func NewPropertySegment(key string) PathSegment
NewPropertySegment creates a property access segment
func NewRecursiveSegment ¶ added in v1.3.0
func NewRecursiveSegment() PathSegment
NewRecursiveSegment creates a recursive descent segment
func NewWildcardSegment ¶ added in v1.3.0
func NewWildcardSegment() PathSegment
NewWildcardSegment creates a wildcard segment
func ParseArraySegment ¶ added in v1.2.0
func ParseArraySegment(part string, segments []PathSegment) []PathSegment
ParseArraySegment parses array access segments like [0], [1:3], etc.
func ParseComplexSegment ¶ added in v1.0.7
func ParseComplexSegment(part string) ([]PathSegment, error)
ParseComplexSegment parses a complex segment that may contain mixed syntax
func ParseExtractionSegment ¶ added in v1.2.0
func ParseExtractionSegment(part string, segments []PathSegment) []PathSegment
ParseExtractionSegment parses extraction segments like {key}, {flat:key}, etc.
func ParsePath ¶ added in v1.0.7
func ParsePath(path string) ([]PathSegment, error)
ParsePath parses a JSON path string into segments PERFORMANCE v3: Added sync.Map-based cache for lock-free reads PERFORMANCE v2: Added fast path for simple single-property access
func ParsePathSegment ¶ added in v1.2.0
func ParsePathSegment(part string, segments []PathSegment) []PathSegment
ParsePathSegment parses a single path segment and appends to segments slice
func SplitPathIntoSegments ¶ added in v1.2.0
func SplitPathIntoSegments(path string, segments []PathSegment) []PathSegment
SplitPathIntoSegments splits a path into segments by dots
func (PathSegment) GetArrayIndex ¶
func (ps PathSegment) GetArrayIndex(arrayLength int) (int, error)
GetArrayIndex returns the array index, handling negative indices
func (*PathSegment) GetEnd ¶ added in v1.2.0
func (ps *PathSegment) GetEnd() (int, bool)
GetEnd returns the end value and whether it was set
func (*PathSegment) GetStart ¶ added in v1.2.0
func (ps *PathSegment) GetStart() (int, bool)
GetStart returns the start value and whether it was set
func (*PathSegment) GetStep ¶ added in v1.2.0
func (ps *PathSegment) GetStep() (int, bool)
GetStep returns the step value and whether it was set
func (*PathSegment) HasEnd ¶ added in v1.2.0
func (ps *PathSegment) HasEnd() bool
HasEnd returns true if slice has an end value
func (*PathSegment) HasStart ¶ added in v1.2.0
func (ps *PathSegment) HasStart() bool
HasStart returns true if slice has a start value
func (*PathSegment) HasStep ¶ added in v1.2.0
func (ps *PathSegment) HasStep() bool
HasStep returns true if slice has a step value
func (PathSegment) IsArrayAccess ¶
func (ps PathSegment) IsArrayAccess() bool
IsArrayAccess returns true if this segment accesses an array
func (*PathSegment) IsFlatExtract ¶ added in v1.2.0
func (ps *PathSegment) IsFlatExtract() bool
IsFlatExtract returns true for flat extraction
func (*PathSegment) IsNegativeIndex ¶ added in v1.2.0
func (ps *PathSegment) IsNegativeIndex() bool
IsNegativeIndex returns true if Index is negative
func (*PathSegment) IsWildcardSegment ¶ added in v1.2.0
func (ps *PathSegment) IsWildcardSegment() bool
IsWildcardSegment returns true for WildcardSegment
func (PathSegment) String ¶
func (ps PathSegment) String() string
String returns a string representation of the path segment
func (PathSegment) TypeString ¶
func (ps PathSegment) TypeString() string
TypeString returns the string type for the segment
type PathSegmentFlags ¶ added in v1.3.0
type PathSegmentFlags uint8
PathSegmentFlags are bit flags for path segment options. This type is exported for use by the public API.
const ( FlagIsNegative PathSegmentFlags = 1 << iota FlagIsWildcard FlagIsFlat FlagHasStart FlagHasEnd FlagHasStep )
Bit flags for PathSegment fields to avoid pointer allocations
type PathSegmentType ¶
type PathSegmentType int
PathSegmentType represents the type of path segment
const ( PropertySegment PathSegmentType = iota ArrayIndexSegment ArraySliceSegment WildcardSegment RecursiveSegment FilterSegment ExtractSegment // For extract operations AppendSegment // For append operations [+] syntax )
func (PathSegmentType) String ¶
func (pst PathSegmentType) String() string
String returns the string representation of PathSegmentType
type StringIntern ¶ added in v1.2.0
type StringIntern struct {
// contains filtered or unexported fields
}
StringIntern stores interned strings for reuse
func NewStringIntern ¶ added in v1.2.0
func NewStringIntern(maxSize int64) *StringIntern
NewStringIntern creates a new string interner with a maximum size
func (*StringIntern) Clear ¶ added in v1.2.0
func (si *StringIntern) Clear()
Clear removes all interned strings
func (*StringIntern) GetStats ¶ added in v1.2.0
func (si *StringIntern) GetStats() InternStats
GetStats returns current statistics including eviction count
func (*StringIntern) Intern ¶ added in v1.2.0
func (si *StringIntern) Intern(s string) string
Intern returns an interned version of the string. If the string is already interned, returns the existing copy; otherwise, stores and returns a copy of the string. SECURITY: Fixed race condition and memory exhaustion with proactive eviction at 80%. PERFORMANCE: Uses pooled buffers for string copying.
func (*StringIntern) InternBytes ¶ added in v1.2.0
func (si *StringIntern) InternBytes(b []byte) string
InternBytes returns an interned string from a byte slice SECURITY: Uses safe conversion to avoid potential race conditions with pooled buffers
type StructFieldInfo ¶ added in v1.2.0
type StructFieldInfo struct {
Index int
Name string
OmitEmpty bool
EncodeFn func(*FastEncoder, reflect.Value) error // Type-specific encoder
Offset uintptr // Field offset for direct access
Type reflect.Type // Cached type information
IsPointer bool // Whether the field is a pointer type
}
StructFieldInfo contains cached information about a struct field
func GetStructEncoder ¶ added in v1.2.0
func GetStructEncoder(t reflect.Type) []StructFieldInfo
GetStructEncoder gets cached struct field info PERFORMANCE: Generates type-specific encoding functions for known types
type WorkerPool ¶ added in v1.2.0
type WorkerPool struct {
// contains filtered or unexported fields
}
WorkerPool manages a pool of worker goroutines
func NewWorkerPool ¶ added in v1.2.0
func NewWorkerPool(workers int) *WorkerPool
NewWorkerPool creates a new worker pool
func (*WorkerPool) Submit ¶ added in v1.2.0
func (wp *WorkerPool) Submit(task func()) bool
Submit adds a task to the pool Returns true if task was submitted/executed, false if pool was stopped SECURITY FIX: Returns status instead of silently dropping tasks
func (*WorkerPool) SubmitWait ¶ added in v1.2.2
func (wp *WorkerPool) SubmitWait(task func()) error
SubmitWait adds a task and blocks until it is submitted (but not necessarily completed). Returns an error if the pool is stopped.
func (*WorkerPool) Wait ¶ added in v1.2.0
func (wp *WorkerPool) Wait()
Wait waits for all tasks to complete SECURITY FIX: Always acquire lock to avoid race with Broadcast PERFORMANCE: Uses condition variable instead of busy-wait for efficient CPU usage