Documentation
¶
Index ¶
- Constants
- Variables
- func GroupDocsOffsets(docsPos []DocPos) ([]uint32, [][]uint64, [][]int)
- func Less(a, b ID) bool
- func LessOrEqual(a, b ID) bool
- func MIDToDuration(t MID) time.Duration
- func MIDToTime(t MID) time.Time
- func MergeQPRs(dst *QPR, qprs []*QPR, limit int, histInterval MID, order DocsOrder)
- type AggBin
- type AggFunc
- type AggregatableSamples
- type AggregateArgs
- type AggregationBucket
- type AggregationResult
- type DocPos
- type DocsOrder
- type ErrorSource
- type FieldMapping
- type ID
- type IDSource
- type IDSources
- type LID
- type MID
- type MIDsDistribution
- type Mapping
- type MappingFieldType
- type MappingType
- type MappingTypeIn
- type MappingTypes
- type QPR
- type RID
- type RawMapping
- type SamplesContainer
- func (h *SamplesContainer) InsertNTimes(num float64, cnt int64)
- func (h *SamplesContainer) InsertSample(num float64)
- func (h *SamplesContainer) InsertSampleNTimes(sample float64, cnt int64)
- func (h *SamplesContainer) Merge(hist *SamplesContainer)
- func (h *SamplesContainer) Quantile(quantile float64) float64
- type TokenizerType
Constants ¶
View Source
const ( AggFuncCount = iota AggFuncSum AggFuncMin AggFuncMax AggFuncAvg AggFuncQuantile AggFuncUnique )
View Source
const ( TokenAll = "_all_" TokenExists = "_exists_" TokenIndex = "_index" )
View Source
const (
DocPosNotFound = DocPos(math.MaxUint64)
)
View Source
const PathDelim = "."
Variables ¶
View Source
var ( ExistsTokenName = []byte(TokenExists) AllTokenName = []byte(TokenAll) )
View Source
var NamesToTokenTypes = map[string]TokenizerType{}
View Source
var TestMapping = Mapping{ "service": NewSingleType(TokenizerTypeKeyword, "", 0), "span_id": NewSingleType(TokenizerTypeKeyword, "", 0), "trace_id": NewSingleType(TokenizerTypeKeyword, "", 0), "message": { Main: MappingType{TokenizerType: TokenizerTypeText}, All: []MappingType{ {Title: "message", TokenizerType: TokenizerTypeText}, {Title: "message.keyword", TokenizerType: TokenizerTypeKeyword, MaxSize: 18}, }, }, "message.keyword": NewSingleType(TokenizerTypeKeyword, "message.keyword", 18), "text": NewSingleType(TokenizerTypeText, "", 0), "k8s_pod": NewSingleType(TokenizerTypeKeyword, "", 0), "level": NewSingleType(TokenizerTypeKeyword, "", 0), "traceID": NewSingleType(TokenizerTypeKeyword, "", 0), "request_uri": NewSingleType(TokenizerTypePath, "", 0), "tags": NewSingleType(TokenizerTypeTags, "", 0), "process": NewSingleType(TokenizerTypeObject, "", 0), "process.tags": NewSingleType(TokenizerTypeTags, "", 0), "process.serviceName": NewSingleType(TokenizerTypeKeyword, "", 0), "tags.sometag": NewSingleType(TokenizerTypeKeyword, "", 0), "request_duration": NewSingleType(TokenizerTypeKeyword, "", 0), "spans": NewSingleType(TokenizerTypeNested, "", 0), "status": NewSingleType(TokenizerTypeKeyword, "", 0), "clientip": NewSingleType(TokenizerTypeKeyword, "", 0), "request": NewSingleType(TokenizerTypeKeyword, "", 0), "spans.span_id": NewSingleType(TokenizerTypeKeyword, "", 0), "_exists_": NewSingleType(TokenizerTypeKeyword, "", 0), "m": NewSingleType(TokenizerTypeKeyword, "", 0), }
View Source
var TokenTypesToNames = map[TokenizerType]string{ TokenizerTypeNoop: "noop", TokenizerTypeKeyword: "keyword", TokenizerTypeText: "text", TokenizerTypeObject: "object", TokenizerTypeTags: "tags", TokenizerTypePath: "path", TokenizerTypeNested: "nested", TokenizerTypeExists: "exists", }
Functions ¶
func LessOrEqual ¶
func MIDToDuration ¶
Types ¶
type AggregatableSamples ¶
type AggregatableSamples struct {
SamplesByBin map[AggBin]*SamplesContainer
NotExists int64
}
func (*AggregatableSamples) Aggregate ¶
func (q *AggregatableSamples) Aggregate(args AggregateArgs) AggregationResult
func (*AggregatableSamples) Merge ¶
func (q *AggregatableSamples) Merge(agg AggregatableSamples)
type AggregateArgs ¶
type AggregationBucket ¶
type AggregationResult ¶
type AggregationResult struct {
Buckets []AggregationBucket
NotExists int64
}
type ErrorSource ¶
type FieldMapping ¶
type FieldMapping Mapping
type ID ¶
func FromString ¶
type MIDsDistribution ¶
type MIDsDistribution struct {
// contains filtered or unexported fields
}
func NewMIDsDistribution ¶
func NewMIDsDistribution(from, to time.Time, bucket time.Duration) *MIDsDistribution
func (*MIDsDistribution) Add ¶
func (d *MIDsDistribution) Add(mid MID)
func (*MIDsDistribution) GetDist ¶
func (d *MIDsDistribution) GetDist() []time.Time
func (*MIDsDistribution) IsIntersecting ¶
func (d *MIDsDistribution) IsIntersecting(from, to MID) bool
func (*MIDsDistribution) MarshalJSON ¶
func (d *MIDsDistribution) MarshalJSON() ([]byte, error)
func (*MIDsDistribution) UnmarshalJSON ¶
func (d *MIDsDistribution) UnmarshalJSON(data []byte) error
type Mapping ¶
type Mapping map[string]MappingTypes
Mapping maps fields to tokenizers. For fields with multiple types there must be a key for each type.
func ReadMapping ¶
type MappingFieldType ¶
type MappingFieldType string
const ( FieldTypeText MappingFieldType = "text" FieldTypeKeyword MappingFieldType = "keyword" FieldTypePath MappingFieldType = "path" FieldTypeObject MappingFieldType = "object" FieldTypeTags MappingFieldType = "tags" FieldTypeNested MappingFieldType = "nested" )
type MappingType ¶
type MappingType struct {
Title string
TokenizerType TokenizerType
MaxSize int
}
type MappingTypeIn ¶
type MappingTypeIn struct {
Title string `yaml:"title"`
Type MappingFieldType `yaml:"type"`
Size int `yaml:"size"`
}
type MappingTypes ¶
type MappingTypes struct {
// Main - original field, used in "read" requests to get tokenizer type for field from search query
Main MappingType
// All - all fields including main one, used in "write" requests to index tokens for each type
All []MappingType
}
func NewSingleType ¶
func NewSingleType(tokenizerType TokenizerType, title string, maxSize int) MappingTypes
type QPR ¶
type QPR struct {
IDs IDSources
Histogram map[MID]uint64
Aggs []AggregatableSamples
Total uint64
Errors []ErrorSource
}
QPR (query partial result) stores the intermediate result of running a query, e.g. the result from only one fraction or a particular store. TODO: remove single Agg when n-agg support in proxy is deployed.
func (*QPR) Aggregate ¶
func (q *QPR) Aggregate(args []AggregateArgs) []AggregationResult
func (*QPR) CombineErrors ¶
type RawMapping ¶
type RawMapping struct {
// contains filtered or unexported fields
}
func NewRawMapping ¶
func NewRawMapping(mapping Mapping) *RawMapping
func (*RawMapping) GetRawMappingBytes ¶
func (a *RawMapping) GetRawMappingBytes() []byte
GetRawMappingBytes returns the raw mapping represented as JSON, stored in bytes.
type SamplesContainer ¶
type SamplesContainer struct {
Min float64
Max float64
Sum float64
// Total is the number of inserted values.
Total int64
// NotExists is the number of values without a token.
NotExists int64
Samples []float64
// contains filtered or unexported fields
}
SamplesContainer is a container used for aggregations. It implements the reservoir sampling algorithm.
func NewSamplesContainers ¶
func NewSamplesContainers() *SamplesContainer
func (*SamplesContainer) InsertNTimes ¶
func (h *SamplesContainer) InsertNTimes(num float64, cnt int64)
func (*SamplesContainer) InsertSample ¶
func (h *SamplesContainer) InsertSample(num float64)
func (*SamplesContainer) InsertSampleNTimes ¶
func (h *SamplesContainer) InsertSampleNTimes(sample float64, cnt int64)
func (*SamplesContainer) Merge ¶
func (h *SamplesContainer) Merge(hist *SamplesContainer)
func (*SamplesContainer) Quantile ¶
func (h *SamplesContainer) Quantile(quantile float64) float64
Quantile calculates the quantile value of the histogram. The argument should be in the [0, 1] range.
The implementation is taken and adapted from github.com/valyala/histogram.
type TokenizerType ¶
type TokenizerType int
const ( TokenizerTypeNoop TokenizerType = 0 TokenizerTypeKeyword TokenizerType = 1 TokenizerTypeText TokenizerType = 2 TokenizerTypeObject TokenizerType = 3 TokenizerTypeTags TokenizerType = 4 TokenizerTypePath TokenizerType = 6 TokenizerTypeNested TokenizerType = 7 TokenizerTypeExists TokenizerType = 8 )
Click to show internal directories.
Click to hide internal directories.