searchparser

package
v0.9.1
Published: Aug 14, 2025 License: Apache-2.0 Imports: 5 Imported by: 0

Documentation

Index

Constants

const (
	ColorNone   = 0
	ColorRed    = 1
	ColorGreen  = 2
	ColorBlue   = 3
	ColorYellow = 4
	ColorPurple = 5
)

Color constants for log line coloring. NOTE: These constants must be kept in sync with frontend/logviewer/colors.ts.

const (
	NodeTypeSearch = "search"
	NodeTypeAnd    = "and"
	NodeTypeOr     = "or"
	NodeTypeError  = "error"
)
const (
	SearchTypeExact       = "exact"
	SearchTypeExactCase   = "exactcase"
	SearchTypeRegexp      = "regexp"
	SearchTypeRegexpCase  = "regexpcase"
	SearchTypeFzf         = "fzf"
	SearchTypeFzfCase     = "fzfcase"
	SearchTypeNot         = "not"
	SearchTypeTag         = "tag"
	SearchTypeUserQuery   = "userquery"
	SearchTypeMarked      = "marked"
	SearchTypeNumeric     = "numeric"
	SearchTypeColorFilter = "colorfilter"
)

Variables

var TagRegexp = regexp.MustCompile(`^` + utilfn.SimpleTagRegexStr + `$`)

TagRegexp is the regular expression pattern for valid tag names. It uses SimpleTagRegexStr from utilfn/util.go for consistency.
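
A minimal in-package sketch of checking candidate tag names against TagRegexp. The sample inputs are only illustrative; which strings actually match is determined by utilfn.SimpleTagRegexStr, which is not shown on this page.

package searchparser

import "fmt"

// tagRegexpDemo is a hypothetical helper that reports whether each
// candidate string is a valid tag name according to TagRegexp.
func tagRegexpDemo() {
	for _, tag := range []string{"build", "not a tag!"} {
		fmt.Printf("%q valid: %v\n", tag, TagRegexp.MatchString(tag))
	}
}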

Functions

func ColorToInt8 added in v0.9.0

func ColorToInt8(color string) int8

ColorToInt8 converts a color string to an int8 value using the defined color constants
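
A minimal in-package sketch of calling ColorToInt8. The color name "green" and the expectation that it maps to ColorGreen are assumptions; the documentation only states that the conversion uses the defined color constants.

package searchparser

import "fmt"

// colorToInt8Demo is a hypothetical helper. It assumes lowercase color
// names such as "green" are accepted and map to the Color* constants.
func colorToInt8Demo() {
	code := ColorToInt8("green")
	fmt.Println("code:", code, "matches ColorGreen:", code == ColorGreen)
}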

func TokensToString

func TokensToString(tokens []Token) string

TokensToString converts a slice of tokens to a string representation
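
A minimal in-package sketch that tokenizes an input and feeds the tokens through TokensToString. Whether the result reproduces the original input exactly is not stated here; the snippet only illustrates how the call fits together with the tokenizer.

package searchparser

import "fmt"

// tokensToStringDemo is a hypothetical helper that tokenizes an input
// and prints the string representation of the resulting tokens.
func tokensToStringDemo(input string) {
	tokens := NewTokenizer(input).GetAllTokens()
	fmt.Println(TokensToString(tokens))
}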

Types

type Node

type Node struct {
	Type         string   // NodeTypeAnd, NodeTypeOr, NodeTypeSearch, NodeTypeError
	Position     Position // Position in the source text
	Children     []*Node  // For composite nodes (AND/OR)
	SearchType   string   // e.g., "exact", "regexp", "fzf", etc. (only for search nodes)
	SearchTerm   string   // The actual search text (only for search nodes)
	Field        string   // Optional field specifier (only for search nodes)
	Op           string   // Optional operator for numeric searches (>, <, >=, <=)
	Color        string   // Color for colorfilter tokens (only for colorfilter nodes)
	IsNot        bool     // Set to true if preceded by '-' (for not tokens)
	ErrorMessage string   // For error nodes, a simple error message
}

func (*Node) PrettyPrint

func (n *Node) PrettyPrint(indent string, originalQuery string) string

PrettyPrint formats a Node structure in a concise way
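
A minimal in-package sketch of parsing a query and pretty-printing the resulting Node tree. The query string and the "  " indent argument are illustrative; the exact query syntax the parser accepts is not spelled out on this page.

package searchparser

import "fmt"

// prettyPrintDemo is a hypothetical helper that parses a query and
// prints the resulting AST using PrettyPrint.
func prettyPrintDemo() {
	query := `error | "disk full"`
	node := NewParser(query).Parse()
	fmt.Println(node.PrettyPrint("  ", query))
}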

type Parser

type Parser struct {
	// contains filtered or unexported fields
}

func NewParser

func NewParser(input string) *Parser

NewParser creates a parser and tokenizes the input.

func (*Parser) Parse

func (p *Parser) Parse() *Node

Parse builds the AST for the entire search expression. The top-level grammar rule is:

search = WS? or_expr WS? EOF ;
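
A minimal in-package sketch of walking the tree returned by Parse, using only the documented Node fields. Collecting SearchTerm values from search nodes is just one plausible way a caller might consume the AST.

package searchparser

// collectTerms is a hypothetical helper that walks a parsed tree and
// gathers the SearchTerm of every search node, recursing into the
// children of AND/OR nodes.
func collectTerms(n *Node) []string {
	if n == nil {
		return nil
	}
	var terms []string
	if n.Type == NodeTypeSearch {
		terms = append(terms, n.SearchTerm)
	}
	for _, child := range n.Children {
		terms = append(terms, collectTerms(child)...)
	}
	return terms
}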

type Position

type Position struct {
	Start int // Start position in the input string
	End   int // End position in the input string
}

type Token

type Token struct {
	Type       TokenType // Type of the token
	Value      string    // Value of the token
	Position   Position  // Position in the source
	Incomplete bool      // True if the token is incomplete (e.g., unterminated string)
}

Token represents a token in the search expression

type TokenType

type TokenType string

TokenType represents the type of token

const (
	// Token types for complex tokens
	TokenWord       TokenType = "WORD"    // Plain word token
	TokenDQuote     TokenType = "DQUOTE"  // Double quoted string
	TokenSQuote     TokenType = "SQUOTE"  // Single quoted string
	TokenRegexp     TokenType = "REGEXP"  // Regular expression
	TokenCRegexp    TokenType = "CREGEXP" // Case-sensitive regexp
	TokenWhitespace TokenType = "WS"      // Whitespace
	TokenEOF        TokenType = "EOF"     // End of input

	// Token types for simple characters (using the actual character)
	TokenLParen  TokenType = "(" // Left parenthesis
	TokenRParen  TokenType = ")" // Right parenthesis
	TokenPipe    TokenType = "|" // Pipe character
	TokenMinus   TokenType = "-" // Minus sign
	TokenDollar  TokenType = "$" // Dollar sign
	TokenTilde   TokenType = "~" // Tilde
	TokenHash    TokenType = "#" // Hash
	TokenPercent TokenType = "%" // Percent sign
)
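
A minimal in-package sketch of filtering a token slice by type, for example dropping whitespace and EOF tokens before further processing. This is an assumed use case, not something the package prescribes.

package searchparser

// stripNoise is a hypothetical helper that removes whitespace and EOF
// tokens from a token slice, keeping everything else in order.
func stripNoise(tokens []Token) []Token {
	var out []Token
	for _, tok := range tokens {
		if tok.Type == TokenWhitespace || tok.Type == TokenEOF {
			continue
		}
		out = append(out, tok)
	}
	return out
}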

type Tokenizer

type Tokenizer struct {
	// contains filtered or unexported fields
}

Tokenizer represents a lexical analyzer for search expressions

func NewTokenizer

func NewTokenizer(input string) *Tokenizer

NewTokenizer creates a new tokenizer for the given input

func (*Tokenizer) GetAllTokens

func (t *Tokenizer) GetAllTokens() []Token

GetAllTokens tokenizes the entire input and returns all tokens

func (*Tokenizer) NextToken

func (t *Tokenizer) NextToken() Token

NextToken returns the next token from the input
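
A minimal in-package sketch of streaming tokens with NextToken. It assumes the tokenizer signals end of input with a TokenEOF token, as the token type list above suggests, and stops at the first one.

package searchparser

import "fmt"

// printTokens is a hypothetical helper that prints each token's type,
// value, and incomplete flag until the tokenizer reports EOF.
func printTokens(input string) {
	t := NewTokenizer(input)
	for {
		tok := t.NextToken()
		if tok.Type == TokenEOF {
			break
		}
		fmt.Printf("%-8s %q incomplete=%v\n", tok.Type, tok.Value, tok.Incomplete)
	}
}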
