seq

package
v0.60.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Aug 27, 2025 License: Apache-2.0 Imports: 18 Imported by: 0

Documentation

Index

Constants

View Source
const (
	AggFuncCount = iota
	AggFuncSum
	AggFuncMin
	AggFuncMax
	AggFuncAvg
	AggFuncQuantile
	AggFuncUnique
)
View Source
const (
	TokenAll    = "_all_"
	TokenExists = "_exists_"
	TokenIndex  = "_index"
)
View Source
const (
	DocPosNotFound = DocPos(math.MaxUint64)
)
View Source
const PathDelim = "."

Variables

View Source
var (
	ExistsTokenName = []byte(TokenExists)
	AllTokenName    = []byte(TokenAll)
)
View Source
var NamesToTokenTypes = map[string]TokenizerType{}
View Source
var TestMapping = Mapping{
	"service":  NewSingleType(TokenizerTypeKeyword, "", 0),
	"span_id":  NewSingleType(TokenizerTypeKeyword, "", 0),
	"trace_id": NewSingleType(TokenizerTypeKeyword, "", 0),
	"message": {
		Main: MappingType{TokenizerType: TokenizerTypeText},
		All: []MappingType{
			{Title: "message", TokenizerType: TokenizerTypeText},
			{Title: "message.keyword", TokenizerType: TokenizerTypeKeyword, MaxSize: 18},
		},
	},
	"message.keyword":     NewSingleType(TokenizerTypeKeyword, "message.keyword", 18),
	"text":                NewSingleType(TokenizerTypeText, "", 0),
	"k8s_pod":             NewSingleType(TokenizerTypeKeyword, "", 0),
	"level":               NewSingleType(TokenizerTypeKeyword, "", 0),
	"traceID":             NewSingleType(TokenizerTypeKeyword, "", 0),
	"request_uri":         NewSingleType(TokenizerTypePath, "", 0),
	"tags":                NewSingleType(TokenizerTypeTags, "", 0),
	"process":             NewSingleType(TokenizerTypeObject, "", 0),
	"process.tags":        NewSingleType(TokenizerTypeTags, "", 0),
	"process.serviceName": NewSingleType(TokenizerTypeKeyword, "", 0),
	"tags.sometag":        NewSingleType(TokenizerTypeKeyword, "", 0),
	"request_duration":    NewSingleType(TokenizerTypeKeyword, "", 0),
	"spans":               NewSingleType(TokenizerTypeNested, "", 0),
	"status":              NewSingleType(TokenizerTypeKeyword, "", 0),
	"clientip":            NewSingleType(TokenizerTypeKeyword, "", 0),
	"request":             NewSingleType(TokenizerTypeKeyword, "", 0),
	"spans.span_id":       NewSingleType(TokenizerTypeKeyword, "", 0),
	"_exists_":            NewSingleType(TokenizerTypeKeyword, "", 0),

	"m": NewSingleType(TokenizerTypeKeyword, "", 0),
}
View Source
var TokenTypesToNames = map[TokenizerType]string{
	TokenizerTypeNoop:    "noop",
	TokenizerTypeKeyword: "keyword",
	TokenizerTypeText:    "text",
	TokenizerTypeObject:  "object",
	TokenizerTypeTags:    "tags",
	TokenizerTypePath:    "path",
	TokenizerTypeNested:  "nested",
	TokenizerTypeExists:  "exists",
}

Functions

func GroupDocsOffsets

func GroupDocsOffsets(docsPos []DocPos) ([]uint32, [][]uint64, [][]int)

func Less

func Less(a, b ID) bool

func LessOrEqual

func LessOrEqual(a, b ID) bool

func MIDToDuration

func MIDToDuration(t MID) time.Duration

func MIDToTime

func MIDToTime(t MID) time.Time

func MergeQPRs

func MergeQPRs(dst *QPR, qprs []*QPR, limit int, histInterval MID, order DocsOrder)

Types

type AggBin

type AggBin struct {
	MID   MID
	Token string
}

type AggFunc

type AggFunc byte

type AggregatableSamples

type AggregatableSamples struct {
	SamplesByBin map[AggBin]*SamplesContainer
	NotExists    int64
}

func (*AggregatableSamples) Aggregate

func (*AggregatableSamples) Merge

type AggregateArgs

type AggregateArgs struct {
	Func                 AggFunc
	SkipWithoutTimestamp bool
	Quantiles            []float64
}

type AggregationBucket

type AggregationBucket struct {
	Name      string
	Value     float64
	Quantiles []float64
	NotExists int64
	MID       MID
}

type AggregationResult

type AggregationResult struct {
	Buckets   []AggregationBucket
	NotExists int64
}

type DocPos

type DocPos uint64

func PackDocPos

func PackDocPos(blockIndex uint32, offset uint64) DocPos

func (DocPos) Unpack

func (pos DocPos) Unpack() (uint32, uint64)

type DocsOrder

type DocsOrder uint8
const (
	DocsOrderDesc DocsOrder = 0
	DocsOrderAsc  DocsOrder = 1
)

func (DocsOrder) IsDesc

func (o DocsOrder) IsDesc() bool

func (DocsOrder) IsReverse

func (o DocsOrder) IsReverse() bool

type ErrorSource

type ErrorSource struct {
	ErrStr string
	Source uint64
}

type FieldMapping

type FieldMapping Mapping

type ID

type ID struct {
	MID MID
	RID RID
}

func FromString

func FromString(x string) (ID, error)

func NewID

func NewID(t time.Time, randomness uint64) ID

func SimpleID

func SimpleID(i int) ID

func (ID) Bytes

func (d ID) Bytes() []byte

func (ID) Equal

func (d ID) Equal(id ID) bool

func (ID) String

func (d ID) String() string

func (ID) Time

func (d ID) Time() string

type IDSource

type IDSource struct {
	ID     ID
	Source uint64
	Hint   string
}

func (*IDSource) Equal

func (id *IDSource) Equal(check IDSource) bool

type IDSources

type IDSources []IDSource

func (IDSources) ApplyHint

func (p IDSources) ApplyHint(hint string)

func (IDSources) IDs

func (p IDSources) IDs() []ID

func (IDSources) Len

func (p IDSources) Len() int

func (IDSources) Less

func (p IDSources) Less(i, j int) bool

func (IDSources) Swap

func (p IDSources) Swap(i, j int)

type LID

type LID uint32 // local id for a fraction

type MID

type MID uint64 // milliseconds part of ID

func DurationToMID

func DurationToMID(d time.Duration) MID

func TimeToMID

func TimeToMID(t time.Time) MID

func (MID) String

func (m MID) String() string

func (MID) Time

func (m MID) Time() time.Time

type MIDsDistribution

type MIDsDistribution struct {
	// contains filtered or unexported fields
}

func NewMIDsDistribution

func NewMIDsDistribution(from, to time.Time, bucket time.Duration) *MIDsDistribution

func (*MIDsDistribution) Add

func (d *MIDsDistribution) Add(mid MID)

func (*MIDsDistribution) GetDist

func (d *MIDsDistribution) GetDist() []time.Time

func (*MIDsDistribution) IsIntersecting

func (d *MIDsDistribution) IsIntersecting(from, to MID) bool

func (*MIDsDistribution) MarshalJSON

func (d *MIDsDistribution) MarshalJSON() ([]byte, error)

func (*MIDsDistribution) UnmarshalJSON

func (d *MIDsDistribution) UnmarshalJSON(data []byte) error

type Mapping

type Mapping map[string]MappingTypes

Mapping - maps fields to tokenizers. For fields with multiple types, there must be a key for each type.

func ReadMapping

func ReadMapping(data []byte) (Mapping, error)

type MappingFieldType

type MappingFieldType string
const (
	FieldTypeText    MappingFieldType = "text"
	FieldTypeKeyword MappingFieldType = "keyword"
	FieldTypePath    MappingFieldType = "path"

	FieldTypeObject MappingFieldType = "object"
	FieldTypeTags   MappingFieldType = "tags"
	FieldTypeNested MappingFieldType = "nested"
)

type MappingType

type MappingType struct {
	Title         string
	TokenizerType TokenizerType
	MaxSize       int
}

type MappingTypeIn

type MappingTypeIn struct {
	Title string           `yaml:"title"`
	Type  MappingFieldType `yaml:"type"`
	Size  int              `yaml:"size"`
}

type MappingTypes

type MappingTypes struct {
	// Main - original field, used in "read" requests to get tokenizer type for field from search query
	Main MappingType
	// All - all fields including main one, used in "write" requests to index tokens for each type
	All []MappingType
}

func NewSingleType

func NewSingleType(tokenizerType TokenizerType, title string, maxSize int) MappingTypes

type QPR

type QPR struct {
	IDs       IDSources
	Histogram map[MID]uint64
	Aggs      []AggregatableSamples
	Total     uint64
	Errors    []ErrorSource
}

QPR (query partial result) stores the intermediate result of running a query, e.g. the result from only one fraction or a particular store. TODO: remove single Agg when n-agg support in proxy is deployed.

func (*QPR) Aggregate

func (q *QPR) Aggregate(args []AggregateArgs) []AggregationResult

func (*QPR) CombineErrors

func (q *QPR) CombineErrors() string

type RID

type RID uint64 // random part of ID

type RawMapping

type RawMapping struct {
	// contains filtered or unexported fields
}

func NewRawMapping

func NewRawMapping(mapping Mapping) *RawMapping

func (*RawMapping) GetRawMappingBytes

func (a *RawMapping) GetRawMappingBytes() []byte

GetRawMappingBytes returns the raw mapping represented as JSON stored in bytes.

type SamplesContainer

type SamplesContainer struct {
	Min float64
	Max float64
	Sum float64
	// Total is the number of inserted values.
	Total int64
	// NotExists is the number of values without a token.
	NotExists int64
	Samples   []float64
	// contains filtered or unexported fields
}

SamplesContainer is a container used for aggregations. It implements the reservoir sampling algorithm.

func NewSamplesContainers

func NewSamplesContainers() *SamplesContainer

func (*SamplesContainer) InsertNTimes

func (h *SamplesContainer) InsertNTimes(num float64, cnt int64)

func (*SamplesContainer) InsertSample

func (h *SamplesContainer) InsertSample(num float64)

func (*SamplesContainer) InsertSampleNTimes

func (h *SamplesContainer) InsertSampleNTimes(sample float64, cnt int64)

func (*SamplesContainer) Merge

func (h *SamplesContainer) Merge(hist *SamplesContainer)

func (*SamplesContainer) Quantile

func (h *SamplesContainer) Quantile(quantile float64) float64

Quantile calculates the quantile value of the histogram. The argument should be in the [0, 1] range.

The implementation is taken and adapted from github.com/valyala/histogram.

type Token

type Token struct {
	Field []byte
	Val   []byte
}

func Tokens

func Tokens(tokens ...string) []Token

type TokenizerType

type TokenizerType int
const (
	TokenizerTypeNoop    TokenizerType = 0
	TokenizerTypeKeyword TokenizerType = 1
	TokenizerTypeText    TokenizerType = 2
	TokenizerTypeObject  TokenizerType = 3
	TokenizerTypeTags    TokenizerType = 4
	TokenizerTypePath    TokenizerType = 6
	TokenizerTypeNested  TokenizerType = 7
	TokenizerTypeExists  TokenizerType = 8
)

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL