resolve

package
v2.0.0-rc.239
Published: Nov 21, 2025 License: MIT Imports: 33 Imported by: 36

Documentation

Index

Constants

const (
	IntrospectionSchemaTypeDataSourceID     = "introspection__schema&__type"
	IntrospectionTypeFieldsDataSourceID     = "introspection__type__fields"
	IntrospectionTypeEnumValuesDataSourceID = "introspection__type__enumValues"
)
const (
	VariableRendererKindPlain                 = "plain"
	VariableRendererKindJson                  = "json"
	VariableRendererKindGraphqlWithValidation = "graphqlWithValidation"
	VariableRendererKindGraphqlResolve        = "graphqlResolve"
	VariableRendererKindCsv                   = "csv"
)
const (
	DefaultHeartbeatInterval = 5 * time.Second
)

Variables

var ConnectionIDs = atomic.NewInt64(0)

ConnectionIDs is used to create unique connection IDs for each subscription. Whenever a new connection is created, use this to generate a new ID. It is public so that higher-level packages can use it to instantiate a new connection.
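
A minimal sketch of how a higher-level transport package might mint a connection ID. The module import path and the go.uber.org/atomic-style Inc method are assumptions inferred from the declaration above, not confirmed by this documentation.

package main

import (
	"fmt"

	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"
)

func main() {
	// Each new client connection receives a unique, monotonically increasing ID.
	connectionID := resolve.ConnectionIDs.Inc()
	fmt.Println("new connection id:", connectionID)
}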

var (
	ErrInvalidSubscriptionFilterTemplate = errors.New("invalid subscription filter template")
)
var (
	ErrUnableToResolve = errors.New("unable to resolve operation")
)

Functions

func GetDurationNanoSinceTraceStart

func GetDurationNanoSinceTraceStart(ctx context.Context) int64

func IsIntrospectionDataSource

func IsIntrospectionDataSource(dataSourceID string) bool

func SetInputUndefinedVariables

func SetInputUndefinedVariables(preparedInput *bytes.Buffer, undefinedVariables []string) error

func SetNormalizeStats

func SetNormalizeStats(ctx context.Context, stats PhaseStats)

func SetParseStats

func SetParseStats(ctx context.Context, stats PhaseStats)

func SetPlannerStats

func SetPlannerStats(ctx context.Context, stats PhaseStats)

func SetRequest

func SetRequest(ctx context.Context, r *RequestData) context.Context

func SetTraceStart

func SetTraceStart(ctx context.Context, predictableDebugTimings bool) context.Context

func SetValidateStats

func SetValidateStats(ctx context.Context, stats PhaseStats)

func SingleFlightDisallowed

func SingleFlightDisallowed(ctx context.Context) bool

Types

type Array

type Array struct {
	Path     []string
	Nullable bool
	Item     Node

	SkipItem SkipArrayItem
}

func (*Array) Copy

func (a *Array) Copy() Node

func (*Array) Equals

func (a *Array) Equals(n Node) bool

func (*Array) NodeKind

func (*Array) NodeKind() NodeKind

func (*Array) NodeNullable

func (a *Array) NodeNullable() bool

func (*Array) NodePath

func (a *Array) NodePath() []string

type AsyncErrorWriter

type AsyncErrorWriter interface {
	WriteError(ctx *Context, err error, res *GraphQLResponse, w io.Writer)
}

type AsyncSubscriptionDataSource

type AsyncSubscriptionDataSource interface {
	AsyncStart(ctx *Context, id uint64, input []byte, updater SubscriptionUpdater) error
	AsyncStop(id uint64)
	UniqueRequestID(ctx *Context, input []byte, xxh *xxhash.Digest) (err error)
}

type AuthorizationDeny

type AuthorizationDeny struct {
	Reason string
}

type Authorizer

type Authorizer interface {
	// AuthorizePreFetch is called prior to making a fetch in the loader
	// This allows implementing policies to prevent fetches to an origin
	// E.g. for Mutations, it might be undesired to just filter out the response
	// You'd want to prevent sending the Operation to the Origin completely
	//
	// The input argument is the final render of the datasource input
	AuthorizePreFetch(ctx *Context, dataSourceID string, input json.RawMessage, coordinate GraphCoordinate) (result *AuthorizationDeny, err error)
	// AuthorizeObjectField operates on the response and can solely be used to implement policies to filter out response fields
	// In contrast to AuthorizePreFetch, this cannot be used to prevent origin requests
	// This function only allows you to filter the response before rendering it to the client
	//
	// The object argument is the flat render of the field-enclosing response object
	// Flat render means we're only rendering scalars, not arrays or objects
	AuthorizeObjectField(ctx *Context, dataSourceID string, object json.RawMessage, coordinate GraphCoordinate) (result *AuthorizationDeny, err error)
	HasResponseExtensionData(ctx *Context) bool
	RenderResponseExtension(ctx *Context, out io.Writer) error
}
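
As a rough sketch of the interface (not the library's own implementation), an Authorizer could deny pre-fetch access based on the graph coordinate and leave response fields untouched. The type name, the "secret" field, and the import paths are purely illustrative assumptions.

package auth

import (
	"encoding/json"
	"io"

	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"
)

// denySecretAuthorizer is a hypothetical Authorizer that blocks fetches
// touching a field called "secret" and allows everything else.
type denySecretAuthorizer struct{}

func (a *denySecretAuthorizer) AuthorizePreFetch(ctx *resolve.Context, dataSourceID string, input json.RawMessage, coordinate resolve.GraphCoordinate) (*resolve.AuthorizationDeny, error) {
	if coordinate.FieldName == "secret" {
		return &resolve.AuthorizationDeny{Reason: "access to secret is not allowed"}, nil
	}
	return nil, nil
}

func (a *denySecretAuthorizer) AuthorizeObjectField(ctx *resolve.Context, dataSourceID string, object json.RawMessage, coordinate resolve.GraphCoordinate) (*resolve.AuthorizationDeny, error) {
	// No response-level filtering in this sketch.
	return nil, nil
}

func (a *denySecretAuthorizer) HasResponseExtensionData(ctx *resolve.Context) bool { return false }

func (a *denySecretAuthorizer) RenderResponseExtension(ctx *resolve.Context, out io.Writer) error {
	return nil
}

Such an authorizer would then be attached per request via ctx.SetAuthorizer(&denySecretAuthorizer{}).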

type BatchEntityFetch

type BatchEntityFetch struct {
	FetchDependencies

	Input                BatchInput
	DataSource           DataSource
	PostProcessing       PostProcessingConfiguration
	DataSourceIdentifier []byte
	Trace                *DataSourceLoadTrace
	Info                 *FetchInfo
}

BatchEntityFetch represents a nested entity fetch on an array field. It allows joining nested fetches to the same subgraph into a single fetch; the representations variable will contain multiple items, one per entity matching this query.

func (*BatchEntityFetch) Dependencies

func (b *BatchEntityFetch) Dependencies() *FetchDependencies

func (*BatchEntityFetch) FetchInfo

func (b *BatchEntityFetch) FetchInfo() *FetchInfo

func (*BatchEntityFetch) FetchKind

func (*BatchEntityFetch) FetchKind() FetchKind

type BatchInput

type BatchInput struct {
	Header InputTemplate
	Items  []InputTemplate
	// If SkipNullItems is set to true, items that render to null will not be included in the batch but skipped
	SkipNullItems bool
	// Same as SkipNullItems but for empty objects
	SkipEmptyObjectItems bool
	// If SkipErrItems is set to true, items that return an error during rendering will not be included in the batch but skipped
	// In this case, the error will be swallowed
	// E.g. if a field is not nullable and the value is null, the item will be skipped
	SkipErrItems bool
	Separator    InputTemplate
	Footer       InputTemplate
}

type BigInt

type BigInt struct {
	Path     []string
	Nullable bool
	Export   *FieldExport `json:"export,omitempty"`
}

func (*BigInt) Copy

func (b *BigInt) Copy() Node

func (*BigInt) Equals

func (b *BigInt) Equals(n Node) bool

func (*BigInt) NodeKind

func (*BigInt) NodeKind() NodeKind

func (*BigInt) NodeNullable

func (b *BigInt) NodeNullable() bool

func (*BigInt) NodePath

func (b *BigInt) NodePath() []string

type BodyData

type BodyData struct {
	Query         string          `json:"query,omitempty"`
	OperationName string          `json:"operationName,omitempty"`
	Variables     json.RawMessage `json:"variables,omitempty"`
}

type Boolean

type Boolean struct {
	Path     []string
	Nullable bool
	Export   *FieldExport `json:"export,omitempty"`
}

func (*Boolean) Copy

func (b *Boolean) Copy() Node

func (*Boolean) Equals

func (b *Boolean) Equals(n Node) bool

func (*Boolean) NodeKind

func (*Boolean) NodeKind() NodeKind

func (*Boolean) NodeNullable

func (b *Boolean) NodeNullable() bool

func (*Boolean) NodePath

func (b *Boolean) NodePath() []string

type BufPair

type BufPair struct {
	Data   *fastbuffer.FastBuffer
	Errors *fastbuffer.FastBuffer
}

func NewBufPair

func NewBufPair() *BufPair

func (*BufPair) HasData

func (b *BufPair) HasData() bool

func (*BufPair) HasErrors

func (b *BufPair) HasErrors() bool

func (*BufPair) Reset

func (b *BufPair) Reset()

func (*BufPair) WriteErr

func (b *BufPair) WriteErr(message, locations, path, extensions []byte)

type CSVVariableRenderer

type CSVVariableRenderer struct {
	Kind string
	// contains filtered or unexported fields
}

CSVVariableRenderer is an implementation of VariableRenderer. It renders the provided list of values as comma-separated values in plain text (no JSON encoding of the values).

func NewCSVVariableRenderer

func NewCSVVariableRenderer(arrayValueType JsonRootType) *CSVVariableRenderer

func NewCSVVariableRendererFromTypeRef

func NewCSVVariableRendererFromTypeRef(operation, definition *ast.Document, variableTypeRef int) *CSVVariableRenderer

func (*CSVVariableRenderer) GetKind

func (c *CSVVariableRenderer) GetKind() string

func (*CSVVariableRenderer) RenderVariable

func (c *CSVVariableRenderer) RenderVariable(_ context.Context, data *astjson.Value, out io.Writer) (err error)
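
A hedged usage sketch: assuming the wundergraph/astjson package exposes a fastjson-style MustParse and the import paths shown here, a JSON array of string values could be rendered as comma-separated plain text.

package main

import (
	"bytes"
	"context"
	"fmt"
	"log"

	"github.com/buger/jsonparser"
	"github.com/wundergraph/astjson"
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"
)

func main() {
	// Renderer for a list whose items are JSON strings.
	renderer := resolve.NewCSVVariableRenderer(resolve.JsonRootType{
		Value: jsonparser.String,
		Kind:  resolve.JsonRootTypeKindSingle,
	})

	values := astjson.MustParse(`["RED","GREEN","BLUE"]`)

	var out bytes.Buffer
	if err := renderer.RenderVariable(context.Background(), values, &out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.String()) // expected: RED,GREEN,BLUE
}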

type ConnectDoneStats

type ConnectDoneStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
	Network                  string `json:"network"`
	Addr                     string `json:"addr"`
	Err                      string `json:"err,omitempty"`
}

type ConnectStartStats

type ConnectStartStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
	Network                  string `json:"network"`
	Addr                     string `json:"addr"`
}

type Context

type Context struct {
	Variables        *astjson.Value
	Files            []*httpclient.FileUpload
	Request          Request
	RenameTypeNames  []RenameTypeName
	RemapVariables   map[string]string
	TracingOptions   TraceOptions
	RateLimitOptions RateLimitOptions
	ExecutionOptions ExecutionOptions
	InitialPayload   []byte
	Extensions       []byte
	LoaderHooks      LoaderHooks
	// contains filtered or unexported fields
}

func NewContext

func NewContext(ctx context.Context) *Context

func (*Context) Context

func (c *Context) Context() context.Context

func (*Context) Free

func (c *Context) Free()

func (*Context) SetAuthorizer

func (c *Context) SetAuthorizer(authorizer Authorizer)

func (*Context) SetEngineLoaderHooks

func (c *Context) SetEngineLoaderHooks(hooks LoaderHooks)

func (*Context) SetFieldValueRenderer

func (c *Context) SetFieldValueRenderer(renderer FieldValueRenderer)

func (*Context) SetRateLimiter

func (c *Context) SetRateLimiter(limiter RateLimiter)

func (*Context) SubgraphErrors

func (c *Context) SubgraphErrors() error

func (*Context) WithContext

func (c *Context) WithContext(ctx context.Context) *Context
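
A brief sketch of preparing a resolve.Context per HTTP request; the header name and import path are illustrative assumptions. Callers would typically release the context with Free() once resolution is done.

package gateway

import (
	"net/http"

	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"
)

// newResolveContext wraps the incoming request context for the resolver.
func newResolveContext(r *http.Request) *resolve.Context {
	c := resolve.NewContext(r.Context())
	c.Request = resolve.Request{
		ID:     r.Header.Get("X-Request-Id"), // illustrative header
		Header: r.Header,
	}
	return c
}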

type ContextVariable

type ContextVariable struct {
	Path     []string
	Renderer VariableRenderer
}

func (*ContextVariable) Equals

func (c *ContextVariable) Equals(another Variable) bool

func (*ContextVariable) GetVariableKind

func (*ContextVariable) GetVariableKind() VariableKind

func (*ContextVariable) TemplateSegment

func (c *ContextVariable) TemplateSegment() TemplateSegment

type CustomNode

type CustomNode struct {
	CustomResolve

	Nullable bool
	Path     []string
}

func (*CustomNode) Copy

func (c *CustomNode) Copy() Node

func (*CustomNode) Equals

func (c *CustomNode) Equals(n Node) bool

func (*CustomNode) NodeKind

func (*CustomNode) NodeKind() NodeKind

func (*CustomNode) NodeNullable

func (c *CustomNode) NodeNullable() bool

func (*CustomNode) NodePath

func (c *CustomNode) NodePath() []string

type CustomResolve

type CustomResolve interface {
	Resolve(ctx *Context, value []byte) ([]byte, error)
}

type DNSDoneStats

type DNSDoneStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
}

type DNSStartStats

type DNSStartStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
	Host                     string `json:"host"`
}

type DataSource

type DataSource interface {
	Load(ctx context.Context, input []byte, out *bytes.Buffer) (err error)
	LoadWithFiles(ctx context.Context, input []byte, files []*httpclient.FileUpload, out *bytes.Buffer) (err error)
}
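
As a hedged sketch of the interface (not the library's own HTTP data source), a DataSource might POST the rendered input to a fixed upstream URL and copy the response body into the output buffer. Error handling is deliberately minimal, and the httpclient import path is an assumption.

package upstream

import (
	"bytes"
	"context"
	"errors"
	"io"
	"net/http"

	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/datasource/httpclient"
)

// simpleHTTPDataSource is a hypothetical DataSource that forwards the planned
// input as a JSON POST body to a single upstream URL.
type simpleHTTPDataSource struct {
	client *http.Client
	url    string
}

func (d *simpleHTTPDataSource) Load(ctx context.Context, input []byte, out *bytes.Buffer) error {
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, d.url, bytes.NewReader(input))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := d.client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	_, err = io.Copy(out, resp.Body)
	return err
}

func (d *simpleHTTPDataSource) LoadWithFiles(ctx context.Context, input []byte, files []*httpclient.FileUpload, out *bytes.Buffer) error {
	// File uploads are out of scope for this sketch.
	return errors.New("file uploads not supported")
}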

type DataSourceInfo

type DataSourceInfo struct {
	ID   string
	Name string
}

type DataSourceLoadTrace

type DataSourceLoadTrace struct {
	RawInputData               json.RawMessage `json:"raw_input_data,omitempty"`
	Input                      json.RawMessage `json:"input,omitempty"`
	Output                     json.RawMessage `json:"output,omitempty"`
	LoadError                  string          `json:"error,omitempty"`
	DurationSinceStartNano     int64           `json:"duration_since_start_nanoseconds,omitempty"`
	DurationSinceStartPretty   string          `json:"duration_since_start_pretty,omitempty"`
	DurationLoadNano           int64           `json:"duration_load_nanoseconds,omitempty"`
	DurationLoadPretty         string          `json:"duration_load_pretty,omitempty"`
	SingleFlightUsed           bool            `json:"single_flight_used"`
	SingleFlightSharedResponse bool            `json:"single_flight_shared_response"`
	LoadSkipped                bool            `json:"load_skipped"`
	LoadStats                  *LoadStats      `json:"load_stats,omitempty"`
	Path                       string          `json:"-"`
}

type DeferField

type DeferField struct{}

type EmptyArray

type EmptyArray struct{}

func (*EmptyArray) Copy

func (*EmptyArray) Copy() Node

func (*EmptyArray) Equals

func (*EmptyArray) Equals(n Node) bool

func (*EmptyArray) NodeKind

func (*EmptyArray) NodeKind() NodeKind

func (*EmptyArray) NodeNullable

func (*EmptyArray) NodeNullable() bool

func (*EmptyArray) NodePath

func (*EmptyArray) NodePath() []string

type EmptyObject

type EmptyObject struct{}

func (*EmptyObject) Copy

func (*EmptyObject) Copy() Node

func (*EmptyObject) Equals

func (*EmptyObject) Equals(n Node) bool

func (*EmptyObject) NodeKind

func (*EmptyObject) NodeKind() NodeKind

func (*EmptyObject) NodeNullable

func (*EmptyObject) NodeNullable() bool

func (*EmptyObject) NodePath

func (*EmptyObject) NodePath() []string

type EntityFetch

type EntityFetch struct {
	FetchDependencies

	Input                EntityInput
	DataSource           DataSource
	PostProcessing       PostProcessingConfiguration
	DataSourceIdentifier []byte
	Trace                *DataSourceLoadTrace
	Info                 *FetchInfo
}

EntityFetch represents a nested entity fetch on an object field; the representations variable will contain a single item.

func (*EntityFetch) Dependencies

func (e *EntityFetch) Dependencies() *FetchDependencies

func (*EntityFetch) FetchInfo

func (e *EntityFetch) FetchInfo() *FetchInfo

func (*EntityFetch) FetchKind

func (*EntityFetch) FetchKind() FetchKind

type EntityInput

type EntityInput struct {
	Header      InputTemplate
	Item        InputTemplate
	SkipErrItem bool
	Footer      InputTemplate
}

type Enum

type Enum struct {
	Path               []string
	Nullable           bool
	Export             *FieldExport `json:"export,omitempty"`
	TypeName           string
	Values             []string
	InaccessibleValues []string
}

func (*Enum) Copy

func (e *Enum) Copy() Node

func (*Enum) Equals

func (e *Enum) Equals(n Node) bool

func (*Enum) NodeKind

func (*Enum) NodeKind() NodeKind

func (*Enum) NodeNullable

func (e *Enum) NodeNullable() bool

func (*Enum) NodePath

func (e *Enum) NodePath() []string

type ErrMergeResult

type ErrMergeResult struct {
	Subgraph string
	Reason   error
	Path     string
}

func (ErrMergeResult) Error

func (e ErrMergeResult) Error() string

type ExecutionOptions

type ExecutionOptions struct {
	SkipLoader                 bool
	IncludeQueryPlanInResponse bool
	SendHeartbeat              bool
}

type Fetch

type Fetch interface {
	FetchKind() FetchKind
	Dependencies() *FetchDependencies

	// FetchInfo returns additional fetch-related information.
	// Callers must treat FetchInfo as read-only after planning; it may be nil when disabled by planner options.
	FetchInfo() *FetchInfo
}

type FetchConfiguration

type FetchConfiguration struct {
	Input      string
	Variables  Variables
	DataSource DataSource

	// RequiresParallelListItemFetch indicates that the single fetches should be executed without batching.
	// If we have multiple fetches attached to the object, then after post-processing of a plan
	// we will get ParallelListItemFetch instead of ParallelFetch.
	// This happens only for objects under an array path and is used only for introspection.
	RequiresParallelListItemFetch bool

	// RequiresEntityFetch will be set to true if the fetch is an entity fetch on an object.
	// After post-processing, we will get EntityFetch.
	RequiresEntityFetch bool

	// RequiresEntityBatchFetch indicates that entity fetches on array items should be batched.
	// After post-processing, we will get EntityBatchFetch.
	RequiresEntityBatchFetch bool

	// PostProcessing specifies the data and error extraction path in the response along with
	// the merge path where will insert the response.
	PostProcessing PostProcessingConfiguration

	// SetTemplateOutputToNullOnVariableNull will safely return "null" if one of the template variables renders to null
	// This is the case, e.g. when using batching and one sibling is null, resulting in a null value for one batch item
	// Returning null in this case tells the batch implementation to skip this item
	SetTemplateOutputToNullOnVariableNull bool

	QueryPlan *QueryPlan

	// OperationName is non-empty when the operation name is propagated to the upstream subgraph fetch.
	OperationName string
}

func (*FetchConfiguration) Equals

func (fc *FetchConfiguration) Equals(other *FetchConfiguration) bool

type FetchDependencies

type FetchDependencies struct {
	FetchID           int
	DependsOnFetchIDs []int
}

FetchDependencies holds the current fetch ID and the IDs of the fetches the current fetch depends on, i.e. the current fetch should be executed only after all of its dependencies have been fetched.

type FetchDependency

type FetchDependency struct {
	// Coordinate is the type+field which depends on one or more FetchDependencyOrigin
	Coordinate GraphCoordinate `json:"coordinate"`
	// IsUserRequested is true if the field was requested by the user/client
	// If false, this indicates that the Coordinate is a dependency for another fetch
	IsUserRequested bool `json:"isUserRequested"`
	// DependsOn are the FetchDependencyOrigins the Coordinate depends on
	DependsOn []FetchDependencyOrigin `json:"dependsOn"`
}

FetchDependency explains how a GraphCoordinate depends on GraphCoordinates from other fetches.

type FetchDependencyOrigin

type FetchDependencyOrigin struct {
	// FetchID is the fetch id providing the Coordinate
	FetchID int `json:"fetchId"`
	// Subgraph is the subgraph providing the Coordinate
	Subgraph string `json:"subgraph"`
	// Coordinate is the GraphCoordinate that another Coordinate depends on
	Coordinate GraphCoordinate `json:"coordinate"`
	// IsKey is true if the Coordinate is a @key dependency
	IsKey bool `json:"isKey"`
	// IsRequires is true if the Coordinate is a @requires dependency
	IsRequires bool `json:"isRequires"`
}

FetchDependencyOrigin defines a GraphCoordinate on a FetchID that another Coordinate depends on. In addition, it contains information on the Subgraph providing the field and whether the Coordinate is a @key or a @requires field dependency.

type FetchInfo

type FetchInfo struct {
	DataSourceID   string
	DataSourceName string
	RootFields     []GraphCoordinate
	OperationType  ast.OperationType
	QueryPlan      *QueryPlan

	// CoordinateDependencies contain a list of GraphCoordinates (typeName+fieldName)
	// and which fields from other fetches they depend on.
	// This information is useful to understand why a fetch depends on other fetches,
	// and how multiple dependencies lead to a chain of fetches
	CoordinateDependencies []FetchDependency

	// FetchReasons contains provenance for reasons why particular fields were fetched.
	// If this structure is built, then all the fields are processed.
	FetchReasons []FetchReason

	// PropagatedFetchReasons holds those FetchReasons that will be propagated
	// with the request to the subgraph as part of the "fetch_reason" extension.
	// Specifically, it is created only for fields stored in the DataSource.RequireFetchReasons().
	PropagatedFetchReasons []FetchReason
}

FetchInfo contains additional (derived) information about the fetch. Some fields may not be generated depending on planner flags.

type FetchItem

type FetchItem struct {
	Fetch                Fetch
	FetchPath            []FetchItemPathElement
	ResponsePath         string
	ResponsePathElements []string
}

func FetchItemWithPath

func FetchItemWithPath(fetch Fetch, responsePath string, path ...FetchItemPathElement) *FetchItem

func (*FetchItem) EqualSingleFetch

func (f *FetchItem) EqualSingleFetch(other *FetchItem) bool

EqualSingleFetch compares two FetchItems for equality; both items must be of kind FetchKindSingle.

type FetchItemPathElement

type FetchItemPathElement struct {
	Kind      FetchItemPathElementKind
	Path      []string
	TypeNames []string
}

func ArrayPath

func ArrayPath(path ...string) FetchItemPathElement

func ObjectPath

func ObjectPath(path ...string) FetchItemPathElement

func PathElementWithTypeNames

func PathElementWithTypeNames(element FetchItemPathElement, typeNames []string) FetchItemPathElement

type FetchItemPathElementKind

type FetchItemPathElementKind string
const (
	FetchItemPathElementKindObject FetchItemPathElementKind = "object"
	FetchItemPathElementKindArray  FetchItemPathElementKind = "array"
)

type FetchKind

type FetchKind int
const (
	FetchKindSingle FetchKind = iota + 1
	FetchKindParallelListItem
	FetchKindEntity
	FetchKindEntityBatch
)

type FetchReason

type FetchReason struct {
	TypeName    string   `json:"typename"`
	FieldName   string   `json:"field"`
	BySubgraphs []string `json:"by_subgraphs,omitempty"`
	ByUser      bool     `json:"by_user,omitempty"`
	IsKey       bool     `json:"is_key,omitempty"`
	IsRequires  bool     `json:"is_requires,omitempty"`
	Nullable    bool     `json:"-"`
}

FetchReason explains who requested a specific (TypeName, FieldName) coordinate. A field can be requested by the user and/or by one or more subgraphs, with optional reasons.

type FetchTraceNode

type FetchTraceNode struct {
	Kind       string                 `json:"kind"`
	Path       string                 `json:"path"`
	SourceID   string                 `json:"source_id"`
	SourceName string                 `json:"source_name"`
	Trace      *DataSourceLoadTrace   `json:"trace,omitempty"`
	Traces     []*DataSourceLoadTrace `json:"traces,omitempty"`
}

type FetchTreeNode

type FetchTreeNode struct {
	Kind FetchTreeNodeKind `json:"kind"`
	// Trigger is only set for subscription
	Trigger         *FetchTreeNode   `json:"trigger"`
	Item            *FetchItem       `json:"item"`
	ChildNodes      []*FetchTreeNode `json:"child_nodes"`
	NormalizedQuery string           `json:"normalized_query"`
}

func Parallel

func Parallel(children ...*FetchTreeNode) *FetchTreeNode

func Sequence

func Sequence(children ...*FetchTreeNode) *FetchTreeNode

func Single

func Single(fetch Fetch, path ...FetchItemPathElement) *FetchTreeNode

func SingleWithPath

func SingleWithPath(fetch Fetch, responsePath string, path ...FetchItemPathElement) *FetchTreeNode
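
These helpers compose a fetch tree. A hedged sketch, with fetch IDs set on the fetches via FetchDependencies, and the response path and field names purely illustrative: a root fetch runs first, then two dependent fetches run in parallel under the same array path.

package plansketch

import "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"

// buildTree sketches a fetch tree: a root fetch, followed by two fetches that
// depend on it and run in parallel under the "products" array path.
func buildTree(rootFetch, reviewsFetch, stockFetch *resolve.SingleFetch) *resolve.FetchTreeNode {
	return resolve.Sequence(
		resolve.Single(rootFetch),
		resolve.Parallel(
			resolve.SingleWithPath(reviewsFetch, "query.products", resolve.ArrayPath("products")),
			resolve.SingleWithPath(stockFetch, "query.products", resolve.ArrayPath("products")),
		),
	)
}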

func (*FetchTreeNode) QueryPlan

func (n *FetchTreeNode) QueryPlan() *FetchTreeQueryPlanNode

func (*FetchTreeNode) Trace

func (n *FetchTreeNode) Trace() *FetchTreeTraceNode

type FetchTreeNodeKind

type FetchTreeNodeKind string
const (
	FetchTreeNodeKindSingle   FetchTreeNodeKind = "Single"
	FetchTreeNodeKindSequence FetchTreeNodeKind = "Sequence"
	FetchTreeNodeKindParallel FetchTreeNodeKind = "Parallel"
	FetchTreeNodeKindTrigger  FetchTreeNodeKind = "Trigger"
)

type FetchTreeQueryPlan

type FetchTreeQueryPlan struct {
	Kind              string            `json:"kind"`
	Path              string            `json:"path,omitempty"`
	SubgraphName      string            `json:"subgraphName"`
	SubgraphID        string            `json:"subgraphId"`
	FetchID           int               `json:"fetchId"`
	DependsOnFetchIDs []int             `json:"dependsOnFetchIds,omitempty"`
	Representations   []Representation  `json:"representations,omitempty"`
	Query             string            `json:"query,omitempty"`
	Dependencies      []FetchDependency `json:"dependencies,omitempty"`
}

type FetchTreeQueryPlanNode

type FetchTreeQueryPlanNode struct {
	Version         string                    `json:"version,omitempty"`
	Kind            FetchTreeNodeKind         `json:"kind"`
	Trigger         *FetchTreeQueryPlan       `json:"trigger,omitempty"`
	Children        []*FetchTreeQueryPlanNode `json:"children,omitempty"`
	Fetch           *FetchTreeQueryPlan       `json:"fetch,omitempty"`
	NormalizedQuery string                    `json:"normalizedQuery,omitempty"`
}

func (*FetchTreeQueryPlanNode) PrettyPrint

func (n *FetchTreeQueryPlanNode) PrettyPrint() string

type FetchTreeTraceNode

type FetchTreeTraceNode struct {
	Kind     FetchTreeNodeKind     `json:"kind"`
	Children []*FetchTreeTraceNode `json:"children,omitempty"`
	Fetch    *FetchTraceNode       `json:"fetch,omitempty"`
}

type Field

type Field struct {
	Name              []byte
	Value             Node
	Position          Position
	Defer             *DeferField
	Stream            *StreamField
	OnTypeNames       [][]byte
	ParentOnTypeNames []ParentOnTypeNames
	Info              *FieldInfo
}

func (*Field) Copy

func (f *Field) Copy() *Field

func (*Field) Equals

func (f *Field) Equals(n *Field) bool

type FieldExport

type FieldExport struct {
	Path     []string
	AsString bool
}

FieldExport takes the value of the field during evaluation (rendering of the field) and stores it in the variables using the Path as JSON pointer.
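
A short sketch: a String field whose resolved value is exported into the variables under a hypothetical "exportedID" path, so that a later fetch template can reference it. The field and path names are illustrative assumptions.

package plansketch

import "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"

// idField exports the resolved "id" value into the variables under the
// hypothetical "exportedID" path.
var idField = &resolve.Field{
	Name: []byte("id"),
	Value: &resolve.String{
		Path: []string{"id"},
		Export: &resolve.FieldExport{
			Path:     []string{"exportedID"},
			AsString: true,
		},
	},
}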

type FieldInfo

type FieldInfo struct {
	// Name is the name of the field.
	Name                string
	ExactParentTypeName string
	// ParentTypeNames is the list of possible parent types for this field.
	// E.g. for a root field, this will be Query, Mutation, Subscription.
	// For a field on an object type, this will be the name of that object type.
	// For a field on an interface type, this will be the name of that interface type and all of its possible implementations.
	ParentTypeNames []string
	// NamedType is the underlying node type of the field.
	// E.g. for a field of type Hobby! this will be Hobby.
	// For a field of type [Hobby] this will be Hobby.
	// For a field of type [Hobby!]! this will be Hobby.
	// For scalar fields, this will return string, int, float, boolean, ID.
	NamedType string
	Source    TypeFieldSource
	FetchID   int
	// HasAuthorizationRule needs to be set to true if the Authorizer should be called for this field
	HasAuthorizationRule bool
	// IndirectInterfaceNames is set to the interface names if the field is on a concrete type that implements an interface which wraps it.
	// It's plural because interfaces can overlap with types that implement multiple interfaces.
	IndirectInterfaceNames []string
}

func (*FieldInfo) Merge

func (i *FieldInfo) Merge(other *FieldInfo)

type FieldValue

type FieldValue struct {
	// Name is the name of the field, e.g. "id", "name", etc.
	Name string
	// Type is the type of the field, e.g. "String", "Int", etc.
	Type string
	// ParentType is the type of the parent object, e.g. "User", "Post", etc.
	ParentType string
	// IsListItem indicates whether the field is a list (array) item.
	IsListItem bool
	// IsNullable indicates whether the field is nullable.
	IsNullable bool
	// IsEnum indicates whether the field value is an enum value.
	IsEnum bool

	// Path holds the path to the field in the response.
	Path string

	// Data holds the actual field value data.
	Data []byte

	// ParsedData is the astjson.Value representation of the field value data.
	ParsedData *astjson.Value
}

type FieldValueRenderer

type FieldValueRenderer interface {
	// RenderFieldValue renders a field value to the provided writer.
	RenderFieldValue(ctx *Context, value FieldValue, out io.Writer) error
}

type Float

type Float struct {
	Path     []string
	Nullable bool
	Export   *FieldExport `json:"export,omitempty"`
}

func (*Float) Copy

func (f *Float) Copy() Node

func (*Float) Equals

func (f *Float) Equals(n Node) bool

func (*Float) NodeKind

func (*Float) NodeKind() NodeKind

func (*Float) NodeNullable

func (f *Float) NodeNullable() bool

func (*Float) NodePath

func (f *Float) NodePath() []string

type GetConnStats

type GetConnStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
	HostPort                 string `json:"host_port"`
}

type GotConnStats

type GotConnStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
	Reused                   bool   `json:"reused"`
	WasIdle                  bool   `json:"was_idle"`
	IdleTimeNano             int64  `json:"idle_time_nanoseconds"`
	IdleTimePretty           string `json:"idle_time_pretty"`
}

type GotFirstResponseByteStats

type GotFirstResponseByteStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
}

type GraphCoordinate

type GraphCoordinate struct {
	TypeName             string `json:"typeName"`
	FieldName            string `json:"fieldName"`
	HasAuthorizationRule bool   `json:"-"`
}

type GraphQLError

type GraphQLError struct {
	Message   string     `json:"message"`
	Locations []Location `json:"locations,omitempty"`
	// Path is a list of path segments that lead to the error; each segment can be a number or a string
	Path       []any          `json:"path"`
	Extensions map[string]any `json:"extensions,omitempty"`
}

type GraphQLResolveInfo

type GraphQLResolveInfo struct {
	ResolveAcquireWaitTime time.Duration
}

type GraphQLResponse

type GraphQLResponse struct {
	Data *Object

	RawFetches []*FetchItem
	Fetches    *FetchTreeNode

	Info        *GraphQLResponseInfo
	DataSources []DataSourceInfo
}

GraphQLResponse contains an ordered tree of fetches and the response shape. Fields are filled in this order:

  1. Planner fills RawFetches and Info fields.
  2. PostProcessor processes RawFetches to build DataSources and Fetches.
  3. Loader executes Fetches to collect all JSON data.
  4. Resolver uses Data to create a final JSON shape that is returned as a response.

type GraphQLResponseInfo

type GraphQLResponseInfo struct {
	OperationType ast.OperationType
}

type GraphQLSubscription

type GraphQLSubscription struct {
	Trigger  GraphQLSubscriptionTrigger
	Response *GraphQLResponse
	Filter   *SubscriptionFilter
}

type GraphQLSubscriptionTrigger

type GraphQLSubscriptionTrigger struct {
	Input          []byte
	InputTemplate  InputTemplate
	Variables      Variables
	Source         SubscriptionDataSource
	PostProcessing PostProcessingConfiguration
	QueryPlan      *QueryPlan
}

type GraphQLVariableRenderer

type GraphQLVariableRenderer struct {
	JSONSchema string
	Kind       string
	// contains filtered or unexported fields
}

GraphQLVariableRenderer is an implementation of VariableRenderer. It renders variables according to the GraphQL specification.

func NewGraphQLVariableRendererFromTypeRefWithoutValidation

func NewGraphQLVariableRendererFromTypeRefWithoutValidation(operation, definition *ast.Document, variableTypeRef int) (*GraphQLVariableRenderer, error)

func (*GraphQLVariableRenderer) GetKind

func (g *GraphQLVariableRenderer) GetKind() string

func (*GraphQLVariableRenderer) RenderVariable

func (g *GraphQLVariableRenderer) RenderVariable(ctx context.Context, data *astjson.Value, out io.Writer) error

type GraphQLVariableResolveRenderer

type GraphQLVariableResolveRenderer struct {
	Kind string
	Node Node
}

func NewGraphQLVariableResolveRenderer

func NewGraphQLVariableResolveRenderer(node Node) *GraphQLVariableResolveRenderer

func (*GraphQLVariableResolveRenderer) GetKind

func (*GraphQLVariableResolveRenderer) RenderVariable

func (g *GraphQLVariableResolveRenderer) RenderVariable(ctx context.Context, data *astjson.Value, out io.Writer) error

type HeaderVariable

type HeaderVariable struct {
	Path []string
}

func (*HeaderVariable) Equals

func (h *HeaderVariable) Equals(another Variable) bool

func (*HeaderVariable) GetVariableKind

func (h *HeaderVariable) GetVariableKind() VariableKind

func (*HeaderVariable) TemplateSegment

func (h *HeaderVariable) TemplateSegment() TemplateSegment

type HookableSubscriptionDataSource

type HookableSubscriptionDataSource interface {
	// SubscriptionOnStart is called when a new subscription is created
	// If an error is returned, the error is propagated to the client.
	SubscriptionOnStart(ctx StartupHookContext, input []byte) (err error)
}

HookableSubscriptionDataSource is a hookable interface for subscription data sources. It is used to call a function when a subscription is started, which is useful for data sources that need to do some work at that point, e.g. to establish a connection to the data source or to emit updates to the client. The function is called with the startup hook context and the input of the subscription, before the subscription itself is started.

type InputTemplate

type InputTemplate struct {
	Segments []TemplateSegment
	// SetTemplateOutputToNullOnVariableNull will safely return "null" if one of the template variables renders to null
	// This is the case, e.g. when using batching and one sibling is null, resulting in a null value for one batch item
	// Returning null in this case tells the batch implementation to skip this item
	SetTemplateOutputToNullOnVariableNull bool
}

func (*InputTemplate) Render

func (i *InputTemplate) Render(ctx *Context, data *astjson.Value, preparedInput *bytes.Buffer) error

func (*InputTemplate) RenderAndCollectUndefinedVariables

func (i *InputTemplate) RenderAndCollectUndefinedVariables(ctx *Context, data *astjson.Value, preparedInput *bytes.Buffer, undefinedVariables *[]string) (err error)
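
A hedged sketch of building and rendering an InputTemplate from two static segments and one context variable. It assumes astjson exposes a fastjson-style MustParse, and that a context-variable segment reads from ctx.Variables, so the data argument is left nil here; real call sites pass the enclosing object data.

package main

import (
	"bytes"
	"context"
	"fmt"
	"log"

	"github.com/wundergraph/astjson"
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"
)

func main() {
	idVariable := &resolve.ContextVariable{
		Path:     []string{"id"},
		Renderer: resolve.NewJSONVariableRenderer(),
	}

	tpl := resolve.InputTemplate{
		Segments: []resolve.TemplateSegment{
			{SegmentType: resolve.StaticSegmentType, Data: []byte(`{"variables":{"id":`)},
			idVariable.TemplateSegment(),
			{SegmentType: resolve.StaticSegmentType, Data: []byte(`}}`)},
		},
	}

	ctx := resolve.NewContext(context.Background())
	ctx.Variables = astjson.MustParse(`{"id":1}`)

	var input bytes.Buffer
	if err := tpl.Render(ctx, nil, &input); err != nil {
		log.Fatal(err)
	}
	fmt.Println(input.String()) // expected: {"variables":{"id":1}}
}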

type Integer

type Integer struct {
	Path     []string
	Nullable bool
	Export   *FieldExport `json:"export,omitempty"`
}

func (*Integer) Copy

func (i *Integer) Copy() Node

func (*Integer) Equals

func (i *Integer) Equals(n Node) bool

func (*Integer) NodeKind

func (*Integer) NodeKind() NodeKind

func (*Integer) NodeNullable

func (i *Integer) NodeNullable() bool

func (*Integer) NodePath

func (i *Integer) NodePath() []string

type IntrospectionData

type IntrospectionData struct {
	IncludeDeprecatedVariableName string
}

type JSONVariableRenderer

type JSONVariableRenderer struct {
	Kind string
	// contains filtered or unexported fields
}

JSONVariableRenderer is an implementation of VariableRenderer. It renders the provided data as JSON. If configured, it also performs a JSON validation check before rendering.

func NewJSONVariableRenderer

func NewJSONVariableRenderer() *JSONVariableRenderer

func (*JSONVariableRenderer) GetKind

func (r *JSONVariableRenderer) GetKind() string

func (*JSONVariableRenderer) RenderVariable

func (r *JSONVariableRenderer) RenderVariable(ctx context.Context, data *astjson.Value, out io.Writer) error

type JsonRootType

type JsonRootType struct {
	Value  jsonparser.ValueType
	Values []jsonparser.ValueType
	Kind   JsonRootTypeKind
}

func (JsonRootType) Satisfies

func (t JsonRootType) Satisfies(dataType jsonparser.ValueType) bool

type JsonRootTypeKind

type JsonRootTypeKind int
const (
	JsonRootTypeKindSingle JsonRootTypeKind = iota
	JsonRootTypeKindMultiple
)

type LoadStats

type LoadStats struct {
	GetConn              GetConnStats              `json:"get_conn"`
	GotConn              GotConnStats              `json:"got_conn"`
	GotFirstResponseByte GotFirstResponseByteStats `json:"got_first_response_byte"`
	DNSStart             DNSStartStats             `json:"dns_start"`
	DNSDone              DNSDoneStats              `json:"dns_done"`
	ConnectStart         ConnectStartStats         `json:"connect_start"`
	ConnectDone          ConnectDoneStats          `json:"connect_done"`
	TLSHandshakeStart    TLSHandshakeStartStats    `json:"tls_handshake_start"`
	TLSHandshakeDone     TLSHandshakeDoneStats     `json:"tls_handshake_done"`
	WroteHeaders         WroteHeadersStats         `json:"wrote_headers"`
	WroteRequest         WroteRequestStats         `json:"wrote_request"`
}

type Loader

type Loader struct {
	// contains filtered or unexported fields
}

func (*Loader) Free

func (l *Loader) Free()

func (*Loader) LoadGraphQLResponseData

func (l *Loader) LoadGraphQLResponseData(ctx *Context, response *GraphQLResponse, resolvable *Resolvable) (err error)

type LoaderHooks

type LoaderHooks interface {
	// OnLoad is called before the fetch is executed
	OnLoad(ctx context.Context, ds DataSourceInfo) context.Context
	// OnFinished is called after the fetch has been executed and the response has been processed and merged
	OnFinished(ctx context.Context, ds DataSourceInfo, info *ResponseInfo)
}

type Location

type Location struct {
	Line   uint32 `json:"line"`
	Column uint32 `json:"column"`
}

type Node

type Node interface {
	NodeKind() NodeKind
	NodePath() []string
	NodeNullable() bool
	Equals(Node) bool
	Copy() Node
}

type NodeKind

type NodeKind int
const (
	NodeKindObject NodeKind = iota + 1
	NodeKindEmptyObject
	NodeKindArray
	NodeKindEmptyArray
	NodeKindNull
	NodeKindString
	NodeKindBoolean
	NodeKindInteger
	NodeKindFloat
	NodeKindBigInt
	NodeKindCustom
	NodeKindScalar
	NodeKindStaticString
	NodeKindEnum
)

type Null

type Null struct {
}

func (*Null) Copy

func (*Null) Copy() Node

func (*Null) Equals

func (*Null) Equals(n Node) bool

func (*Null) NodeKind

func (*Null) NodeKind() NodeKind

func (*Null) NodeNullable

func (*Null) NodeNullable() bool

func (*Null) NodePath

func (*Null) NodePath() []string

type Object

type Object struct {
	Nullable bool
	Path     []string
	Fields   []*Field

	PossibleTypes map[string]struct{} `json:"-"`
	SourceName    string              `json:"-"`
	TypeName      string              `json:"-"`
}

func (*Object) Copy

func (o *Object) Copy() Node

func (*Object) Equals

func (o *Object) Equals(n Node) bool

func (*Object) NodeKind

func (*Object) NodeKind() NodeKind

func (*Object) NodeNullable

func (o *Object) NodeNullable() bool

func (*Object) NodePath

func (o *Object) NodePath() []string

type ObjectVariable

type ObjectVariable struct {
	Path     []string
	Renderer VariableRenderer
}

func (*ObjectVariable) Equals

func (o *ObjectVariable) Equals(another Variable) bool

func (*ObjectVariable) GetVariableKind

func (o *ObjectVariable) GetVariableKind() VariableKind

func (*ObjectVariable) TemplateSegment

func (o *ObjectVariable) TemplateSegment() TemplateSegment

type ParallelListItemFetch

type ParallelListItemFetch struct {
	Fetch  *SingleFetch
	Traces []*SingleFetch
	Trace  *DataSourceLoadTrace
}

The ParallelListItemFetch can be used to make nested parallel fetches within a list. Usually, you want to batch fetches within a list, which is the default behavior of SingleFetch. However, if the data source does not support batching, you can use this fetch to make parallel fetches within a list.

func (*ParallelListItemFetch) Dependencies

func (p *ParallelListItemFetch) Dependencies() *FetchDependencies

func (*ParallelListItemFetch) FetchInfo

func (p *ParallelListItemFetch) FetchInfo() *FetchInfo

func (*ParallelListItemFetch) FetchKind

func (*ParallelListItemFetch) FetchKind() FetchKind

type ParentOnTypeNames

type ParentOnTypeNames struct {
	Depth int
	Names [][]byte
}

type PhaseStats

type PhaseStats struct {
	DurationNano             int64  `json:"duration_nanoseconds"`
	DurationPretty           string `json:"duration_pretty"`
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
}

func SetDebugStats

func SetDebugStats(info *TraceInfo, stats PhaseStats, phaseNo int64) PhaseStats

type PlainVariableRenderer

type PlainVariableRenderer struct {
	JSONSchema string
	Kind       string
}

PlainVariableRenderer is an implementation of VariableRenderer. It renders the provided data as plain text. E.g. a provided JSON string of "foo" will be rendered as foo, without quotes. If a nested JSON object is provided, it will be rendered as is. This renderer can be used e.g. to render the provided scalar into a URL.

func NewPlainVariableRenderer

func NewPlainVariableRenderer() *PlainVariableRenderer

func (*PlainVariableRenderer) GetKind

func (p *PlainVariableRenderer) GetKind() string

func (*PlainVariableRenderer) RenderVariable

func (p *PlainVariableRenderer) RenderVariable(ctx context.Context, data *astjson.Value, out io.Writer) error

type PlanPrinter

type PlanPrinter struct {
	// contains filtered or unexported fields
}

func (*PlanPrinter) Print

func (p *PlanPrinter) Print(plan *FetchTreeQueryPlanNode) string

type Position

type Position struct {
	Line   uint32
	Column uint32
}

type PostProcessingConfiguration

type PostProcessingConfiguration struct {
	// SelectResponseDataPath is used to make a jsonparser.Get call on the response data
	SelectResponseDataPath []string

	// SelectResponseErrorsPath is similar to SelectResponseDataPath, but for errors
	// If this is set, the response will be considered an error if the jsonparser.Get call returns a non-empty value
	// The value will be expected to be a GraphQL error object
	SelectResponseErrorsPath []string

	// MergePath can be defined to merge the result of the post-processing into the parent object at the given path
	// e.g. if the parent is {"a":1}, result is {"foo":"bar"} and the MergePath is ["b"],
	// the result will be {"a":1,"b":{"foo":"bar"}}
	// If the MergePath is empty, the result will be merged into the parent object
	// In this case, the result would be {"a":1,"foo":"bar"}
	// This is useful if we make multiple fetches, e.g. parallel fetches, that would otherwise overwrite each other
	MergePath []string
}
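
A short, hedged example of how such a configuration might look for a federation entity fetch; the exact paths depend on the upstream response shape and are assumptions here.

package plansketch

import "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"

// postProcessing extracts "data._entities" from an entity fetch response,
// reads GraphQL errors from "errors", and merges the result directly into
// the parent object (empty MergePath).
var postProcessing = resolve.PostProcessingConfiguration{
	SelectResponseDataPath:   []string{"data", "_entities"},
	SelectResponseErrorsPath: []string{"errors"},
	MergePath:                nil,
}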

func (*PostProcessingConfiguration) Equals

Equals compares two PostProcessingConfiguration objects

type QueryPlan

type QueryPlan struct {
	DependsOnFields []Representation
	Query           string
}

type RateLimitDeny

type RateLimitDeny struct {
	Reason string
}

type RateLimitError

type RateLimitError struct {
	SubgraphName string
	Path         string
	Reason       string
}

func NewRateLimitError

func NewRateLimitError(subgraphName, path, reason string) *RateLimitError

func (*RateLimitError) Error

func (e *RateLimitError) Error() string

type RateLimitErrorExtensionCode

type RateLimitErrorExtensionCode struct {
	Enabled bool
	Code    string
}

type RateLimitOptions

type RateLimitOptions struct {
	// Enable switches rate limiting on or off
	Enable bool
	// IncludeStatsInResponseExtension includes the rate limit stats in the response extensions
	IncludeStatsInResponseExtension bool

	Rate                    int
	Burst                   int
	Period                  time.Duration
	RateLimitKey            string
	RejectExceedingRequests bool

	ErrorExtensionCode RateLimitErrorExtensionCode
}

type RateLimiter

type RateLimiter interface {
	RateLimitPreFetch(ctx *Context, info *FetchInfo, input json.RawMessage) (result *RateLimitDeny, err error)
	RenderResponseExtension(ctx *Context, out io.Writer) error
}

type RenameTypeName

type RenameTypeName struct {
	From, To []byte
}

type Reporter

type Reporter interface {
	// SubscriptionUpdateSent is called when a new subscription update is sent
	SubscriptionUpdateSent()
	// SubscriptionCountInc increases the subscription count when a new subscription is added to a trigger; this includes inflight subscriptions
	SubscriptionCountInc(count int)
	// SubscriptionCountDec decreases the subscription count when a subscription is removed from a trigger, e.g. on shutdown
	SubscriptionCountDec(count int)
	// TriggerCountInc increases the trigger count when a new trigger is added, e.g. when a trigger is started and initialized
	TriggerCountInc(count int)
	// TriggerCountDec decreases the trigger count when a trigger is removed, e.g. when a trigger is shut down
	TriggerCountDec(count int)
}
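
A hedged sketch of a Reporter that keeps in-process gauges; a real implementation would typically forward these counts to a metrics backend.

package metrics

import "sync/atomic"

// gaugeReporter is a hypothetical Reporter backed by atomic counters.
type gaugeReporter struct {
	subscriptions atomic.Int64
	triggers      atomic.Int64
	updatesSent   atomic.Int64
}

func (r *gaugeReporter) SubscriptionUpdateSent()        { r.updatesSent.Add(1) }
func (r *gaugeReporter) SubscriptionCountInc(count int) { r.subscriptions.Add(int64(count)) }
func (r *gaugeReporter) SubscriptionCountDec(count int) { r.subscriptions.Add(-int64(count)) }
func (r *gaugeReporter) TriggerCountInc(count int)      { r.triggers.Add(int64(count)) }
func (r *gaugeReporter) TriggerCountDec(count int)      { r.triggers.Add(-int64(count)) }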

type Representation

type Representation struct {
	Kind      RepresentationKind `json:"kind"`
	TypeName  string             `json:"typeName"`
	FieldName string             `json:"fieldName,omitempty"`
	Fragment  string             `json:"fragment"`
}

type RepresentationKind

type RepresentationKind string
const (
	RepresentationKindKey      RepresentationKind = "@key"
	RepresentationKindRequires RepresentationKind = "@requires"
)

type Request

type Request struct {
	ID     string
	Header http.Header
}

type RequestData

type RequestData struct {
	Method  string      `json:"method"`
	URL     string      `json:"url"`
	Headers http.Header `json:"headers"`
	Body    BodyData    `json:"body,omitempty"`
}

func GetRequest

func GetRequest(ctx context.Context) *RequestData

type Resolvable

type Resolvable struct {
	// contains filtered or unexported fields
}

func NewResolvable

func NewResolvable(options ResolvableOptions) *Resolvable

func (*Resolvable) Init

func (r *Resolvable) Init(ctx *Context, initialData []byte, operationType ast.OperationType) (err error)

func (*Resolvable) InitSubscription

func (r *Resolvable) InitSubscription(ctx *Context, initialData []byte, postProcessing PostProcessingConfiguration) (err error)

func (*Resolvable) Reset

func (r *Resolvable) Reset()

func (*Resolvable) Resolve

func (r *Resolvable) Resolve(ctx context.Context, rootData *Object, fetchTree *FetchTreeNode, out io.Writer) error

func (*Resolvable) ResolveNode

func (r *Resolvable) ResolveNode(node Node, data *astjson.Value, out io.Writer) error

func (*Resolvable) WroteErrorsWithoutData

func (r *Resolvable) WroteErrorsWithoutData() bool

type ResolvableObjectVariable

type ResolvableObjectVariable struct {
	Renderer *GraphQLVariableResolveRenderer
}

func NewResolvableObjectVariable

func NewResolvableObjectVariable(node *Object) *ResolvableObjectVariable

func (*ResolvableObjectVariable) Equals

func (h *ResolvableObjectVariable) Equals(another Variable) bool

func (*ResolvableObjectVariable) GetVariableKind

func (h *ResolvableObjectVariable) GetVariableKind() VariableKind

func (*ResolvableObjectVariable) TemplateSegment

func (h *ResolvableObjectVariable) TemplateSegment() TemplateSegment

type ResolvableOptions

type ResolvableOptions struct {
	ApolloCompatibilityValueCompletionInExtensions bool
	ApolloCompatibilityTruncateFloatValues         bool
	ApolloCompatibilitySuppressFetchErrors         bool
	ApolloCompatibilityReplaceInvalidVarError      bool
}

type Resolver

type Resolver struct {
	// contains filtered or unexported fields
}

Resolver is a single threaded event loop that processes all events on a single goroutine. It is absolutely critical to ensure that all events are processed quickly to prevent blocking and that resolver modifications are done on the event loop goroutine. Long-running operations should be offloaded to the subscription worker goroutine. If a different goroutine needs to emit an event, it should be done through the events channel to avoid race conditions.

func New

func New(ctx context.Context, options ResolverOptions) *Resolver

New returns a new Resolver. ctx.Done() is used to cancel all active subscriptions and streams.
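
A hedged construction sketch; the option values are illustrative, not recommendations, and cancelling the parent context tears down all active subscriptions and streams as described above.

package main

import (
	"context"

	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel() // cancelling ctx cancels all active subscriptions and streams

	resolver := resolve.New(ctx, resolve.ResolverOptions{
		MaxConcurrency:                1024, // illustrative values
		PropagateSubgraphErrors:       true,
		SubgraphErrorPropagationMode:  resolve.SubgraphErrorPropagationModeWrapped,
		SubscriptionHeartbeatInterval: resolve.DefaultHeartbeatInterval,
	})
	_ = resolver
}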

func (*Resolver) AsyncCompleteSubscription

func (r *Resolver) AsyncCompleteSubscription(id SubscriptionIdentifier) error

func (*Resolver) AsyncResolveGraphQLSubscription

func (r *Resolver) AsyncResolveGraphQLSubscription(ctx *Context, subscription *GraphQLSubscription, writer SubscriptionResponseWriter, id SubscriptionIdentifier) (err error)

func (*Resolver) AsyncUnsubscribeClient

func (r *Resolver) AsyncUnsubscribeClient(connectionID int64) error

func (*Resolver) AsyncUnsubscribeSubscription

func (r *Resolver) AsyncUnsubscribeSubscription(id SubscriptionIdentifier) error

func (*Resolver) ResolveGraphQLResponse

func (r *Resolver) ResolveGraphQLResponse(ctx *Context, response *GraphQLResponse, data []byte, writer io.Writer) (*GraphQLResolveInfo, error)

func (*Resolver) ResolveGraphQLSubscription

func (r *Resolver) ResolveGraphQLSubscription(ctx *Context, subscription *GraphQLSubscription, writer SubscriptionResponseWriter) error

func (*Resolver) SetAsyncErrorWriter

func (r *Resolver) SetAsyncErrorWriter(w AsyncErrorWriter)

type ResolverOptions

type ResolverOptions struct {
	// MaxConcurrency limits the number of concurrent tool calls used to resolve operations.
	// The limit is only applied to getToolsWithLimit() calls. Intentionally, we don't use this limit for
	// subscription updates to prevent blocking the subscription during a network collapse, because a one-to-one
	// relation is not given as in the case of a single HTTP request. We already enforce concurrency limits through
	// the MaxSubscriptionWorkers option, which is a semaphore to limit the number of concurrent subscription updates.
	//
	// If set to 0, no limit is applied
	// It is advised to set this to a reasonable value to prevent excessive memory usage
	// Each concurrent resolve operation allocates ~50kb of memory
	// In addition, there's a limit of how many concurrent requests can be efficiently resolved
	// This depends on the number of CPU cores available, the complexity of the operations, and the origin services
	MaxConcurrency int

	Debug bool

	Reporter         Reporter
	AsyncErrorWriter AsyncErrorWriter

	// PropagateSubgraphErrors adds Subgraph Errors to the response
	PropagateSubgraphErrors bool

	// PropagateSubgraphStatusCodes adds the status code of the Subgraph to the extensions field of a Subgraph Error
	PropagateSubgraphStatusCodes bool

	// SubgraphErrorPropagationMode defines how Subgraph Errors are propagated
	// SubgraphErrorPropagationModeWrapped wraps Subgraph Errors in a Subgraph Error to prevent leaking internal information
	// SubgraphErrorPropagationModePassThrough passes Subgraph Errors through without modification
	SubgraphErrorPropagationMode SubgraphErrorPropagationMode

	// RewriteSubgraphErrorPaths rewrites the paths of Subgraph Errors to match the path of the field from the perspective of the client
	// This means that nested entity requests will have their paths rewritten from e.g. "_entities.foo.bar" to "person.foo.bar" if the root field above is "person"
	RewriteSubgraphErrorPaths bool

	// OmitSubgraphErrorLocations omits the locations field of Subgraph Errors
	OmitSubgraphErrorLocations bool

	// OmitSubgraphErrorExtensions omits the extensions field of Subgraph Errors
	OmitSubgraphErrorExtensions bool

	// AllowAllErrorExtensionFields allows all fields in the extensions field of a root subgraph error
	AllowAllErrorExtensionFields bool

	// AllowedErrorExtensionFields defines which fields are allowed in the extensions field of a root subgraph error
	AllowedErrorExtensionFields []string

	// AttachServiceNameToErrorExtensions attaches the service name to the extensions field of a root subgraph error
	AttachServiceNameToErrorExtensions bool

	// DefaultErrorExtensionCode is the default error code to use for subgraph errors if no code is provided
	DefaultErrorExtensionCode string

	// MaxRecyclableParserSize limits the size of the Parser that can be recycled back into the Pool.
	// If set to 0, no limit is applied
	// This helps keep the Heap size more maintainable if you regularly perform large queries.
	MaxRecyclableParserSize int

	// ResolvableOptions are configuration options for the Resolvable struct
	ResolvableOptions ResolvableOptions

	// AllowedSubgraphErrorFields defines which fields are allowed in the subgraph error when in passthrough mode
	AllowedSubgraphErrorFields []string

	// SubscriptionHeartbeatInterval defines the interval at which a heartbeat is sent to all subscriptions (whether this does anything is determined by the subscription response writer)
	SubscriptionHeartbeatInterval time.Duration

	// MaxSubscriptionFetchTimeout defines the maximum time a subscription fetch can take before it is considered timed out
	MaxSubscriptionFetchTimeout time.Duration

	// ApolloRouterCompatibilitySubrequestHTTPError is a compatibility flag for Apollo Router, it is used to handle HTTP errors in subrequests differently
	ApolloRouterCompatibilitySubrequestHTTPError bool

	// PropagateFetchReasons enables adding the "fetch_reasons" extension to
	// upstream subgraph requests. This extension explains why each field was requested.
	// This flag does not expose the data to clients.
	PropagateFetchReasons bool

	ValidateRequiredExternalFields bool
}

type ResponseInfo

type ResponseInfo struct {
	StatusCode int
	Err        error
	// Request is the original request that was sent to the subgraph. It should only be used for reading,
	// to avoid memory conflicts; its body will be nil, as it has already been read.
	Request *http.Request
	// ResponseHeaders contains a clone of the headers of the response from the subgraph.
	ResponseHeaders http.Header
	// contains filtered or unexported fields
}

func (*ResponseInfo) GetResponseBody

func (ri *ResponseInfo) GetResponseBody() string

type ResponseWriter

type ResponseWriter interface {
	io.Writer
}

type Scalar

type Scalar struct {
	Path     []string
	Nullable bool
	Export   *FieldExport `json:"export,omitempty"`
}

func (*Scalar) Copy

func (s *Scalar) Copy() Node

func (*Scalar) Equals

func (s *Scalar) Equals(n Node) bool

func (*Scalar) NodeKind

func (*Scalar) NodeKind() NodeKind

func (*Scalar) NodeNullable

func (s *Scalar) NodeNullable() bool

func (*Scalar) NodePath

func (s *Scalar) NodePath() []string

type SegmentType

type SegmentType int
const (
	StaticSegmentType SegmentType = iota + 1
	VariableSegmentType
)

type SingleFetch

type SingleFetch struct {
	FetchConfiguration
	FetchDependencies

	InputTemplate        InputTemplate
	DataSourceIdentifier []byte
	Trace                *DataSourceLoadTrace
	Info                 *FetchInfo
}

func (*SingleFetch) Dependencies

func (s *SingleFetch) Dependencies() *FetchDependencies

func (*SingleFetch) FetchInfo

func (s *SingleFetch) FetchInfo() *FetchInfo

func (*SingleFetch) FetchKind

func (*SingleFetch) FetchKind() FetchKind

type SingleFlightStats

type SingleFlightStats struct {
	SingleFlightUsed           bool
	SingleFlightSharedResponse bool
}

func GetSingleFlightStats

func GetSingleFlightStats(ctx context.Context) *SingleFlightStats

type SkipArrayItem

type SkipArrayItem func(ctx *Context, arrayItem *astjson.Value) bool

type StartupHookContext

type StartupHookContext struct {
	Context context.Context
	Updater func(data []byte)
}

type StaticString

type StaticString struct {
	Path  []string
	Value string
}

func (*StaticString) Copy

func (s *StaticString) Copy() Node

func (*StaticString) Equals

func (s *StaticString) Equals(n Node) bool

func (*StaticString) NodeKind

func (*StaticString) NodeKind() NodeKind

func (*StaticString) NodeNullable

func (s *StaticString) NodeNullable() bool

func (*StaticString) NodePath

func (s *StaticString) NodePath() []string

type StreamField

type StreamField struct {
	InitialBatchSize int
}

type String

type String struct {
	Path                 []string
	Nullable             bool
	Export               *FieldExport `json:"export,omitempty"`
	UnescapeResponseJson bool         `json:"unescape_response_json,omitempty"`
	IsTypeName           bool         `json:"is_type_name,omitempty"`
}

func (*String) Copy

func (s *String) Copy() Node

func (*String) Equals

func (s *String) Equals(n Node) bool

func (*String) NodeKind

func (*String) NodeKind() NodeKind

func (*String) NodeNullable

func (s *String) NodeNullable() bool

func (*String) NodePath

func (s *String) NodePath() []string

type SubgraphError

type SubgraphError struct {
	DataSourceInfo DataSourceInfo
	Path           string
	Reason         string
	ResponseCode   int

	DownstreamErrors []*GraphQLError
}

func NewSubgraphError

func NewSubgraphError(ds DataSourceInfo, path, reason string, responseCode int) *SubgraphError

func (*SubgraphError) AppendDownstreamError

func (e *SubgraphError) AppendDownstreamError(error *GraphQLError)

func (*SubgraphError) Codes

func (e *SubgraphError) Codes() []string

func (*SubgraphError) Error

func (e *SubgraphError) Error() string

Error returns the high-level error without downstream errors. For more details, call Summary().

type SubgraphErrorPropagationMode

type SubgraphErrorPropagationMode int
const (
	// SubgraphErrorPropagationModeWrapped collects all errors and exposes them as a list of errors on the extensions field "errors" of the gateway error.
	SubgraphErrorPropagationModeWrapped SubgraphErrorPropagationMode = iota
	// SubgraphErrorPropagationModePassThrough propagates all errors as root errors as they are.
	SubgraphErrorPropagationModePassThrough
)

type SubscriptionCloseKind

type SubscriptionCloseKind struct {
	WSCode ws.StatusCode
	Reason string
}
var (
	SubscriptionCloseKindNormal                 SubscriptionCloseKind = SubscriptionCloseKind{ws.StatusNormalClosure, "Normal closure"}
	SubscriptionCloseKindDownstreamServiceError SubscriptionCloseKind = SubscriptionCloseKind{ws.StatusGoingAway, "Downstream service error"}
	SubscriptionCloseKindGoingAway              SubscriptionCloseKind = SubscriptionCloseKind{ws.StatusGoingAway, "Going away"}
)

type SubscriptionDataSource

type SubscriptionDataSource interface {
	// Start is called when a new subscription is created. It establishes the connection to the data source.
	// The updater is used to send updates to the client. Deduplication of the request must be done before calling this method.
	Start(ctx *Context, input []byte, updater SubscriptionUpdater) error
	UniqueRequestID(ctx *Context, input []byte, xxh *xxhash.Digest) (err error)
}
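
A skeletal implementation of the interface, assuming the module's import path and github.com/cespare/xxhash/v2 for the digest; the actual connection to the origin is elided:

package example

import (
	"github.com/cespare/xxhash/v2"                                  // digest import path assumed
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" // import path assumed
)

// tickerSource is a hypothetical SubscriptionDataSource that pushes events
// to the client through the updater until the origin stream ends.
type tickerSource struct{}

func (t *tickerSource) Start(ctx *resolve.Context, input []byte, updater resolve.SubscriptionUpdater) error {
	go func() {
		// ... connect to the origin described by input, then for every event:
		updater.Update([]byte(`{"data":{"tick":1}}`))
		// ... and once the origin closes the stream:
		updater.Complete()
	}()
	return nil
}

// UniqueRequestID hashes everything that distinguishes one subscription from
// another, so identical requests can share a single trigger.
func (t *tickerSource) UniqueRequestID(ctx *resolve.Context, input []byte, xxh *xxhash.Digest) error {
	_, err := xxh.Write(input)
	return err
}

var _ resolve.SubscriptionDataSource = (*tickerSource)(nil)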

type SubscriptionFieldFilter

type SubscriptionFieldFilter struct {
	FieldPath []string
	Values    []InputTemplate
}

func (*SubscriptionFieldFilter) SkipEvent

func (f *SubscriptionFieldFilter) SkipEvent(ctx *Context, data []byte, buf *bytes.Buffer) (bool, error)

type SubscriptionFilter

type SubscriptionFilter struct {
	And []SubscriptionFilter
	Or  []SubscriptionFilter
	Not *SubscriptionFilter
	In  *SubscriptionFieldFilter
}

func (*SubscriptionFilter) SkipEvent

func (f *SubscriptionFilter) SkipEvent(ctx *Context, data []byte, buf *bytes.Buffer) (bool, error)
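
A sketch of composing a filter, assuming the module's import path, an InputTemplate with a Segments field, a StaticSegmentType constant, and that an In filter keeps events whose field value matches one of the rendered Values; the literal value is purely illustrative:

package example

import (
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" // import path assumed
)

// newStatusFilter only lets events through whose "status" field matches the
// static value "published".
func newStatusFilter() *resolve.SubscriptionFilter {
	return &resolve.SubscriptionFilter{
		In: &resolve.SubscriptionFieldFilter{
			FieldPath: []string{"status"},
			Values: []resolve.InputTemplate{
				{
					Segments: []resolve.TemplateSegment{
						{
							SegmentType: resolve.StaticSegmentType, // constant name assumed
							Data:        []byte("published"),
						},
					},
				},
			},
		},
	}
}

The resolver evaluates the filter by calling SkipEvent for every incoming event and drops events for which it returns true.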

type SubscriptionIdentifier

type SubscriptionIdentifier struct {
	ConnectionID   int64
	SubscriptionID int64
}

type SubscriptionResponseWriter

type SubscriptionResponseWriter interface {
	ResponseWriter
	Flush() error
	Complete()
	Heartbeat() error
	Close(kind SubscriptionCloseKind)
}

type SubscriptionUpdater

type SubscriptionUpdater interface {
	// Update sends an update to the client. It is not guaranteed that the update is sent immediately.
	Update(data []byte)
	// UpdateSubscription sends an update to a single subscription. It is not guaranteed that the update is sent immediately.
	UpdateSubscription(id SubscriptionIdentifier, data []byte)
	// Complete completes all subscriptions and cleans up the trigger. No more updates should be sent after calling Complete.
	Complete()
	// Close closes the subscription and cleans up the trigger and all subscriptions. No more updates should be sent after calling Close.
	Close(kind SubscriptionCloseKind)
	// CloseSubscription closes a single subscription. No more updates should be sent to that subscription after calling CloseSubscription.
	CloseSubscription(kind SubscriptionCloseKind, id SubscriptionIdentifier)
	// Subscriptions returns all the subscriptions associated with this Updater
	Subscriptions() map[context.Context]SubscriptionIdentifier
}

type TLSHandshakeDoneStats

type TLSHandshakeDoneStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
	Err                      string `json:"err,omitempty"`
}

type TLSHandshakeStartStats

type TLSHandshakeStartStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
}

type TemplateSegment

type TemplateSegment struct {
	SegmentType        SegmentType
	Data               []byte
	VariableKind       VariableKind
	VariableSourcePath []string
	Renderer           VariableRenderer
	Segments           []TemplateSegment
}
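
A sketch of an InputTemplate built from TemplateSegments: a static JSON prefix, a variable taken from the operation's context, and a static suffix. The import path, the segment-type constant names, the InputTemplate.Segments field, and the NewJSONVariableRenderer constructor are assumptions for illustration:

package example

import (
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" // import path assumed
)

// newProductInput renders an input of the form {"id":<id>}, where <id> is the
// operation variable "id" rendered as JSON.
func newProductInput() resolve.InputTemplate {
	return resolve.InputTemplate{
		Segments: []resolve.TemplateSegment{
			// Static JSON prefix.
			{SegmentType: resolve.StaticSegmentType, Data: []byte(`{"id":`)},
			// Variable segment: the value at path "id" in the context variables,
			// rendered as JSON (constant and constructor names assumed).
			{
				SegmentType:        resolve.VariableSegmentType,
				VariableKind:       resolve.ContextVariableKind,
				VariableSourcePath: []string{"id"},
				Renderer:           resolve.NewJSONVariableRenderer(),
			},
			// Static JSON suffix.
			{SegmentType: resolve.StaticSegmentType, Data: []byte(`}`)},
		},
	}
}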

type TraceData

type TraceData struct {
	Version string              `json:"version"`
	Info    *TraceInfo          `json:"info"`
	Fetches *FetchTreeTraceNode `json:"fetches"`
	Request *RequestData        `json:"request,omitempty"`
}

func GetTrace

func GetTrace(ctx context.Context, fetchTree *FetchTreeNode) TraceData

type TraceInfo

type TraceInfo struct {
	TraceStart     time.Time  `json:"-"`
	TraceStartTime string     `json:"trace_start_time"`
	TraceStartUnix int64      `json:"trace_start_unix"`
	ParseStats     PhaseStats `json:"parse_stats"`
	NormalizeStats PhaseStats `json:"normalize_stats"`
	ValidateStats  PhaseStats `json:"validate_stats"`
	PlannerStats   PhaseStats `json:"planner_stats"`
	// contains filtered or unexported fields
}

func GetTraceInfo

func GetTraceInfo(ctx context.Context) *TraceInfo

type TraceOptions

type TraceOptions struct {
	// Enable switches tracing on or off
	Enable bool
	// ExcludeParseStats excludes parse timing information from the trace output
	ExcludeParseStats bool
	// ExcludeNormalizeStats excludes normalize timing information from the trace output
	ExcludeNormalizeStats bool
	// ExcludeValidateStats excludes validation timing information from the trace output
	ExcludeValidateStats bool
	// ExcludePlannerStats excludes planner timing information from the trace output
	ExcludePlannerStats bool
	// ExcludeRawInputData excludes the raw input for a load operation from the trace output
	ExcludeRawInputData bool
	// ExcludeInput excludes the rendered input for a load operation from the trace output
	ExcludeInput bool
	// ExcludeOutput excludes the result of a load operation from the trace output
	ExcludeOutput bool
	// ExcludeLoadStats excludes the load timing information from the trace output
	ExcludeLoadStats bool
	// EnablePredictableDebugTimings makes the timings in the trace output predictable for debugging purposes
	EnablePredictableDebugTimings bool
	// IncludeTraceOutputInResponseExtensions includes the trace output in the response extensions
	IncludeTraceOutputInResponseExtensions bool
	// Debug makes trace IDs of fetches predictable for debugging purposes
	Debug bool
}

func (*TraceOptions) DisableAll

func (r *TraceOptions) DisableAll()

func (*TraceOptions) EnableAll

func (r *TraceOptions) EnableAll()
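
A small sketch of configuring tracing, assuming only the module's import path: enable everything, then exclude the potentially large payloads and expose the result in the response extensions.

package example

import (
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" // import path assumed
)

func newTraceOptions() resolve.TraceOptions {
	var opts resolve.TraceOptions
	opts.EnableAll()
	// Payloads can be large; keep only the timing information.
	opts.ExcludeRawInputData = true
	opts.ExcludeInput = true
	opts.ExcludeOutput = true
	// Return the trace alongside the GraphQL response.
	opts.IncludeTraceOutputInResponseExtensions = true
	return opts
}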

type TypeFieldSource

type TypeFieldSource struct {
	IDs   []string
	Names []string
}

type Variable

type Variable interface {
	GetVariableKind() VariableKind
	Equals(another Variable) bool
	TemplateSegment() TemplateSegment
}

type VariableKind

type VariableKind int
const (
	ContextVariableKind VariableKind = iota + 1
	ObjectVariableKind
	HeaderVariableKind
	ResolvableObjectVariableKind
	ListVariableKind
)

type VariableRenderer

type VariableRenderer interface {
	GetKind() string
	RenderVariable(ctx context.Context, data *astjson.Value, out io.Writer) error
}

VariableRenderer is the interface that allows custom implementations of rendering Variables. Depending on where a Variable is being used, a different method for rendering is required. E.g. a Variable needs to be rendered conforming to the GraphQL specification when used within a GraphQL Query. If a Variable is used within a JSON Object, the contents need to be rendered as a JSON Object.
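
A minimal custom renderer satisfying the interface, assuming the module's import paths; it writes the variable's JSON encoding to the output unchanged and reuses the package's "json" kind identifier:

package example

import (
	"context"
	"io"

	"github.com/wundergraph/astjson"                                // import path assumed
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" // import path assumed
)

// rawJSONRenderer is a hypothetical VariableRenderer that emits the variable
// exactly as its JSON representation.
type rawJSONRenderer struct{}

func (rawJSONRenderer) GetKind() string {
	return resolve.VariableRendererKindJson
}

func (rawJSONRenderer) RenderVariable(ctx context.Context, data *astjson.Value, out io.Writer) error {
	// MarshalTo appends the value's JSON encoding to the given byte slice.
	_, err := out.Write(data.MarshalTo(nil))
	return err
}

var _ resolve.VariableRenderer = rawJSONRenderer{}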

type Variables

type Variables []Variable

func NewVariables

func NewVariables(variables ...Variable) Variables

func (*Variables) AddVariable

func (v *Variables) AddVariable(variable Variable) (name string, exists bool)
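
A small sketch of registering a variable, assuming the module's import path; ContextVariable and its Path field are used here purely for illustration, and any Variable implementation behaves the same way:

package example

import (
	"github.com/wundergraph/graphql-go-tools/v2/pkg/engine/resolve" // import path assumed
)

func registerIDVariable() {
	vars := resolve.NewVariables()

	// ContextVariable and its Path field are assumptions for illustration.
	name, exists := vars.AddVariable(&resolve.ContextVariable{Path: []string{"id"}})

	// name is the placeholder to embed in the input template;
	// exists reports whether an equal variable was already registered.
	_, _ = name, exists
}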

type WroteHeadersStats

type WroteHeadersStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
}

type WroteRequestStats

type WroteRequestStats struct {
	DurationSinceStartNano   int64  `json:"duration_since_start_nanoseconds"`
	DurationSinceStartPretty string `json:"duration_since_start_pretty"`
	Err                      string `json:"err,omitempty"`
}
