tokenizer

package
v0.64.1 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Dec 25, 2025 License: Apache-2.0 Imports: 6 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type ExistsTokenizer

type ExistsTokenizer struct{}

func NewExistsTokenizer

func NewExistsTokenizer() *ExistsTokenizer

func (*ExistsTokenizer) Tokenize

func (t *ExistsTokenizer) Tokenize(tokens []MetaToken, _, _ []byte, _ int) []MetaToken

type KeywordTokenizer

type KeywordTokenizer struct {
	// contains filtered or unexported fields
}

func NewKeywordTokenizer

func NewKeywordTokenizer(maxTokenSize int, caseSensitive, partialIndexing bool) *KeywordTokenizer

func (*KeywordTokenizer) Tokenize

func (t *KeywordTokenizer) Tokenize(tokens []MetaToken, name, value []byte, maxTokenSize int) []MetaToken

type MetaToken added in v0.62.4

type MetaToken struct {
	Key   []byte
	Value []byte
}

func (*MetaToken) MarshalBinaryTo added in v0.62.4

func (m *MetaToken) MarshalBinaryTo(b []byte) []byte

func (MetaToken) String added in v0.62.4

func (m MetaToken) String() string

String returns a human-readable representation of the token; it is used in tests.

func (*MetaToken) UnmarshalBinary added in v0.62.4

func (m *MetaToken) UnmarshalBinary(b []byte) ([]byte, error)

type PathTokenizer

type PathTokenizer struct {
	// contains filtered or unexported fields
}

func NewPathTokenizer

func NewPathTokenizer(
	maxTokenSize int,
	caseSensitive bool,
	partialIndexing bool,
) *PathTokenizer

func (*PathTokenizer) Tokenize

func (t *PathTokenizer) Tokenize(tokens []MetaToken, name, value []byte, maxTokenSize int) []MetaToken

type TextTokenizer

type TextTokenizer struct {
	// contains filtered or unexported fields
}

func NewTextTokenizer

func NewTextTokenizer(maxTokenSize int, caseSensitive, partialIndexing bool, maxFieldValueLength int) *TextTokenizer

func (*TextTokenizer) Tokenize

func (t *TextTokenizer) Tokenize(tokens []MetaToken, name, value []byte, maxFieldValueLength int) []MetaToken

type Tokenizer

type Tokenizer interface {
	Tokenize(tokens []MetaToken, key, value []byte, maxLength int) []MetaToken
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL