Documentation
¶
Index ¶
- Constants
- Variables
- func BinaryFieldFromRecord(ar arrow.Record, name string) (*array.Binary, error)
- func BooleanFieldFromRecord(ar arrow.Record, name string) (*array.Boolean, error)
- func BuildArrowLocations(allocator memory.Allocator, stacktraces []*pb.Stacktrace, ...) (arrow.Record, error)
- func CalculateBase(ei *profilestorepb.ExecutableInfo, start, limit, offset uint64) (uint64, error)
- func CreateDiffColumn(pool memory.Allocator, rows int) arrow.Array
- func DictionaryFromRecord(ar arrow.Record, name string) (*array.Dictionary, error)
- func LabelNamesFromSamples(takenLabels map[string]string, stringTable []string, samples []*pprofpb.Sample, ...)
- func LabelsFromSample(takenLabels map[string]string, stringTable []string, plabels []*pprofpb.Label) (map[string]string, map[string]int64)
- func MatcherToBooleanExpression(matcher *labels.Matcher) (logicalplan.Expr, error)
- func MatchersToBooleanExpressions(matchers []*labels.Matcher) ([]logicalplan.Expr, error)
- func NormalizeAddress(addr uint64, ei *profilestorepb.ExecutableInfo, start, limit, offset uint64) (uint64, error)
- func NormalizedIngest(ctx context.Context, addressNormalizationFailed prometheus.Counter, ...) error
- func ParquetBufToArrowRecord(ctx context.Context, buf *dynparquet.Buffer, rowsPerRecord uint) ([]arrow.Record, error)
- func SampleToParquetRow(schema *dynparquet.Schema, row parquet.Row, ...) parquet.Row
- func SeriesToArrowRecord(schema *dynparquet.Schema, series []Series, ...) (arrow.Record, error)
- func StringValueFromDictionary(arr *array.Dictionary, i int) string
- func ValidatePprofProfile(p *pprofpb.Profile, ei []*profilestorepb.ExecutableInfo) error
- type ArrowToProfileConverter
- type Engine
- type ErrMissingColumn
- type MetastoreNormalizer
- func (n *MetastoreNormalizer) NormalizeFunctions(ctx context.Context, functions []*pprofpb.Function, stringTable []string) ([]*pb.Function, error)
- func (n *MetastoreNormalizer) NormalizeLocations(ctx context.Context, locations []*pprofpb.Location, ...) ([]*pb.Location, error)
- func (n *MetastoreNormalizer) NormalizeMappings(ctx context.Context, mappings []*pprofpb.Mapping, stringTable []string) ([]mappingNormalizationInfo, error)
- func (n *MetastoreNormalizer) NormalizePprof(ctx context.Context, name string, takenLabelNames map[string]string, ...) ([]*profile.NormalizedProfile, error)
- func (n *MetastoreNormalizer) NormalizeStacktraces(ctx context.Context, samples []*pprofpb.Sample, locations []*pb.Location) ([]*pb.Stacktrace, error)
- type NormalizedIngester
- type NormalizedWriteRawRequest
- type Normalizer
- type ProfileSymbolizer
- type Querier
- func (q *Querier) Labels(ctx context.Context, match []string, start, end time.Time) ([]string, error)
- func (q *Querier) ProfileTypes(ctx context.Context) ([]*pb.ProfileType, error)
- func (q *Querier) QueryMerge(ctx context.Context, query string, start, end time.Time) (profile.Profile, error)
- func (q *Querier) QueryRange(ctx context.Context, query string, startTime, endTime time.Time, ...) ([]*pb.MetricsSeries, error)
- func (q *Querier) QuerySingle(ctx context.Context, query string, time time.Time) (profile.Profile, error)
- func (q *Querier) SymbolizeArrowRecord(ctx context.Context, records []arrow.Record, valueColumnName string) ([]arrow.Record, error)
- func (q *Querier) Values(ctx context.Context, labelName string, match []string, start, end time.Time) ([]string, error)
- type QueryParts
- type Series
- type Table
Constants ¶
const ( ColumnDurationSum = "sum(" + profile.ColumnDuration + ")" ColumnPeriodSum = "sum(" + profile.ColumnPeriod + ")" ColumnValueCount = "count(" + profile.ColumnValue + ")" ColumnValueSum = "sum(" + profile.ColumnValue + ")" )
const (
UnsymolizableLocationAddress = 0x0
)
Variables ¶
var ErrMissingNameLabel = errors.New("missing __name__ label")
var ExperimentalArrow bool
Functions ¶
func BinaryFieldFromRecord ¶ added in v0.13.0
func BooleanFieldFromRecord ¶ added in v0.13.0
func BuildArrowLocations ¶ added in v0.19.0
func CalculateBase ¶ added in v0.19.0
func CalculateBase(ei *profilestorepb.ExecutableInfo, start, limit, offset uint64) (uint64, error)
CalculateBase determines the base address to subtract from a virtual address to get the symbol table address. For an executable, the base is 0. Otherwise, it's a shared library, and the base is the address where the mapping starts. The kernel needs special handling.
func CreateDiffColumn ¶ added in v0.19.0
func DictionaryFromRecord ¶ added in v0.16.0
func LabelNamesFromSamples ¶ added in v0.15.0
func LabelsFromSample ¶ added in v0.15.0
func LabelsFromSample(takenLabels map[string]string, stringTable []string, plabels []*pprofpb.Label) (map[string]string, map[string]int64)
TODO: support num label units.
func MatcherToBooleanExpression ¶ added in v0.13.0
func MatcherToBooleanExpression(matcher *labels.Matcher) (logicalplan.Expr, error)
func MatchersToBooleanExpressions ¶ added in v0.13.0
func MatchersToBooleanExpressions(matchers []*labels.Matcher) ([]logicalplan.Expr, error)
func NormalizeAddress ¶ added in v0.19.0
func NormalizeAddress(addr uint64, ei *profilestorepb.ExecutableInfo, start, limit, offset uint64) (uint64, error)
func NormalizedIngest ¶ added in v0.18.0
func NormalizedIngest( ctx context.Context, addressNormalizationFailed prometheus.Counter, req *profilestorepb.WriteRawRequest, logger log.Logger, table Table, schema *dynparquet.Schema, metastore metastorepb.MetastoreServiceClient, bufferPool *sync.Pool, enableAddressNormalization bool, ) error
NormalizedIngest normalizes and persists pprof samples (mappings, functions, locations, stack traces). Note that normalization is used here in broad terms (think database normalization); it doesn't necessarily mean address normalization (PIE).
func ParquetBufToArrowRecord ¶ added in v0.17.0
func ParquetBufToArrowRecord(ctx context.Context, buf *dynparquet.Buffer, rowsPerRecord uint) ([]arrow.Record, error)
ParquetBufToArrowRecord converts a parquet buffer to an arrow record. If rowsPerRecord is 0, then the entire buffer is converted to a single record.
func SampleToParquetRow ¶ added in v0.12.0
func SampleToParquetRow( schema *dynparquet.Schema, row parquet.Row, labelNames, profileLabelNames, profileNumLabelNames []string, lset map[string]string, meta profile.Meta, s *profile.NormalizedSample, ) parquet.Row
SampleToParquetRow converts a sample to a Parquet row. The passed labels must be sorted.
func SeriesToArrowRecord ¶ added in v0.16.0
func StringValueFromDictionary ¶ added in v0.16.0
func StringValueFromDictionary(arr *array.Dictionary, i int) string
func ValidatePprofProfile ¶ added in v0.15.0
func ValidatePprofProfile(p *pprofpb.Profile, ei []*profilestorepb.ExecutableInfo) error
Types ¶
type ArrowToProfileConverter ¶ added in v0.13.0
type ArrowToProfileConverter struct {
// contains filtered or unexported fields
}
func NewArrowToProfileConverter ¶ added in v0.13.0
func NewArrowToProfileConverter( tracer trace.Tracer, keyMaker *metastore.KeyMaker, ) *ArrowToProfileConverter
func (*ArrowToProfileConverter) Convert ¶ added in v0.13.0
func (c *ArrowToProfileConverter) Convert( ctx context.Context, p profile.Profile, ) (profile.OldProfile, error)
type ErrMissingColumn ¶ added in v0.12.0
func (ErrMissingColumn) Error ¶ added in v0.12.0
func (e ErrMissingColumn) Error() string
type MetastoreNormalizer ¶ added in v0.15.0
type MetastoreNormalizer struct {
// contains filtered or unexported fields
}
func NewNormalizer ¶ added in v0.12.0
func NewNormalizer( metastore pb.MetastoreServiceClient, enableAddressNormalization bool, addressNormalizationFailed prometheus.Counter, ) *MetastoreNormalizer
func (*MetastoreNormalizer) NormalizeFunctions ¶ added in v0.15.0
func (*MetastoreNormalizer) NormalizeLocations ¶ added in v0.15.0
func (n *MetastoreNormalizer) NormalizeLocations( ctx context.Context, locations []*pprofpb.Location, mappingsInfo []mappingNormalizationInfo, mappings []*pprofpb.Mapping, functions []*pb.Function, normalizedAddress bool, stringTable []string, executableInfo []*profilestorepb.ExecutableInfo, ) ([]*pb.Location, error)
func (*MetastoreNormalizer) NormalizeMappings ¶ added in v0.15.0
func (*MetastoreNormalizer) NormalizePprof ¶ added in v0.15.0
func (n *MetastoreNormalizer) NormalizePprof( ctx context.Context, name string, takenLabelNames map[string]string, p *pprofpb.Profile, normalizedAddress bool, executableInfo []*profilestorepb.ExecutableInfo, ) ([]*profile.NormalizedProfile, error)
func (*MetastoreNormalizer) NormalizeStacktraces ¶ added in v0.15.0
func (n *MetastoreNormalizer) NormalizeStacktraces(ctx context.Context, samples []*pprofpb.Sample, locations []*pb.Location) ([]*pb.Stacktrace, error)
type NormalizedIngester ¶ added in v0.15.0
type NormalizedIngester struct {
// contains filtered or unexported fields
}
func NewNormalizedIngester ¶ added in v0.15.0
type NormalizedWriteRawRequest ¶ added in v0.15.0
type NormalizedWriteRawRequest struct {
Series []Series
AllLabelNames []string
AllPprofLabelNames []string
AllPprofNumLabelNames []string
}
func NormalizeWriteRawRequest ¶ added in v0.15.0
func NormalizeWriteRawRequest(ctx context.Context, normalizer Normalizer, req *profilestorepb.WriteRawRequest) (NormalizedWriteRawRequest, error)
NormalizeWriteRawRequest normalizes the profiles (mappings, functions, locations, stack traces) to prepare for ingestion. It also validates label names of profiles' series, decompresses the samples, unmarshals and validates them.
type Normalizer ¶ added in v0.12.0
type Normalizer interface {
NormalizePprof(
ctx context.Context,
name string,
takenLabelNames map[string]string,
p *pprofpb.Profile,
normalizedAddress bool,
executableInfo []*profilestorepb.ExecutableInfo,
) ([]*profile.NormalizedProfile, error)
}
type ProfileSymbolizer ¶ added in v0.19.0
type ProfileSymbolizer struct {
// contains filtered or unexported fields
}
func NewProfileSymbolizer ¶ added in v0.19.0
func NewProfileSymbolizer( tracer trace.Tracer, m pb.MetastoreServiceClient, ) *ProfileSymbolizer
func (*ProfileSymbolizer) SymbolizeNormalizedProfile ¶ added in v0.19.0
func (s *ProfileSymbolizer) SymbolizeNormalizedProfile(ctx context.Context, p *profile.NormalizedProfile) (profile.OldProfile, error)
type Querier ¶ added in v0.13.0
type Querier struct {
// contains filtered or unexported fields
}
func NewQuerier ¶ added in v0.13.0
func (*Querier) ProfileTypes ¶ added in v0.13.0
func (*Querier) QueryMerge ¶ added in v0.13.0
func (*Querier) QueryRange ¶ added in v0.13.0
func (*Querier) QuerySingle ¶ added in v0.13.0
func (*Querier) SymbolizeArrowRecord ¶ added in v0.19.0
type QueryParts ¶ added in v0.16.0
func ParseQuery ¶ added in v0.17.0
func ParseQuery(query string) (QueryParts, error)
ParseQuery parses a query string into a QueryParts struct.
func QueryToFilterExprs ¶ added in v0.13.0
func QueryToFilterExprs(query string) (QueryParts, []logicalplan.Expr, error)