outputs

package
v0.7.2
Published: Dec 4, 2018 License: MIT Imports: 22 Imported by: 0

Documentation

Index

Constants

const (
	ToHTTPKind           = "toHTTP"
	DefaultToHTTPTimeout = 1 * time.Second
)

const (
	// ToKafkaKind is the Kind for the ToKafka Flux function
	ToKafkaKind = "toKafka"
)

Variables

var DefaultKafkaWriterFactory = func(conf kafka.WriterConfig) KafkaWriter {
	return kafka.NewWriter(conf)
}

DefaultKafkaWriterFactory builds a KafkaWriter from a kafka.WriterConfig. It is exposed as a variable so that an alternate writer (for example, a mock) can be injected for testing.

var DefaultToHTTPUserAgent = "fluxd/dev"

DefaultToHTTPUserAgent is the default User-Agent header value used by the toHTTP output.
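
As a small usage sketch, this variable can be reassigned before queries run to identify a deployment; the import path and the replacement value below are assumptions, not values from this documentation.

package main

import (
	"fmt"

	"github.com/influxdata/flux/functions/outputs" // assumed import path; adjust to this module's actual path
)

func main() {
	// Replace the default "fluxd/dev" agent with an illustrative value.
	outputs.DefaultToHTTPUserAgent = "example-collector/1.0"
	fmt.Println("toHTTP user agent:", outputs.DefaultToHTTPUserAgent)
}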

Functions

This section is empty.

Types

type KafkaWriter

type KafkaWriter interface {
	io.Closer
	WriteMessages(context.Context, ...kafka.Message) error
}

KafkaWriter is the interface that writers returned by DefaultKafkaWriterFactory must satisfy.

type ToHTTPOpSpec

type ToHTTPOpSpec struct {
	URL          string            `json:"url"`
	Method       string            `json:"method"` // defaults to POST
	Name         string            `json:"name"`
	NameColumn   string            `json:"nameColumn"` // either name or name_column must be set; if neither is set, the "_measurement" column is used.
	Headers      map[string]string `json:"headers"`    // TODO: implement Headers once the parser bug with keys, arrays, and objects is fixed (new parser with string literals as keys)
	URLParams    map[string]string `json:"urlParams"`  // TODO: implement URLParams once the parser bug with keys, arrays, and objects is fixed (new parser with string literals as keys)
	Timeout      time.Duration     `json:"timeout"`    // defaults to DefaultToHTTPTimeout if zero
	NoKeepAlive  bool              `json:"noKeepAlive"`
	TimeColumn   string            `json:"timeColumn"`
	TagColumns   []string          `json:"tagColumns"`
	ValueColumns []string          `json:"valueColumns"`
}

func (ToHTTPOpSpec) Kind

func (*ToHTTPOpSpec) ReadArgs

func (o *ToHTTPOpSpec) ReadArgs(args flux.Arguments) error

ReadArgs loads a flux.Arguments into a ToHTTPOpSpec and sets several defaults. If the HTTP method isn't set, it defaults to POST; the method is also uppercased. If time_column isn't set, it defaults to execute.TimeColLabel. If value_column isn't set, it defaults to []string{execute.DefaultValueColLabel}.

func (*ToHTTPOpSpec) UnmarshalJSON

func (o *ToHTTPOpSpec) UnmarshalJSON(b []byte) (err error)

UnmarshalJSON unmarshals and validates JSON into a ToHTTPOpSpec.
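
A minimal sketch of decoding a spec from JSON, using the json tags shown on ToHTTPOpSpec; the import path and the example values are assumptions.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/influxdata/flux/functions/outputs" // assumed import path
)

func main() {
	raw := []byte(`{"url": "http://localhost:9999/metrics", "method": "POST", "name": "example"}`)

	var spec outputs.ToHTTPOpSpec
	// json.Unmarshal invokes the custom UnmarshalJSON, so an invalid spec
	// (for example, a malformed URL) would be reported here as an error.
	if err := json.Unmarshal(raw, &spec); err != nil {
		fmt.Println("invalid toHTTP spec:", err)
		return
	}
	fmt.Printf("%s %s (name=%s)\n", spec.Method, spec.URL, spec.Name)
}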

type ToHTTPProcedureSpec

type ToHTTPProcedureSpec struct {
	Spec *ToHTTPOpSpec
}

func (*ToHTTPProcedureSpec) Copy

func (*ToHTTPProcedureSpec) Kind

type ToHTTPTransformation

type ToHTTPTransformation struct {
	// contains filtered or unexported fields
}

func (*ToHTTPTransformation) Finish

func (t *ToHTTPTransformation) Finish(id execute.DatasetID, err error)

func (*ToHTTPTransformation) Process

func (t *ToHTTPTransformation) Process(id execute.DatasetID, tbl flux.Table) error

func (*ToHTTPTransformation) RetractTable

func (t *ToHTTPTransformation) RetractTable(id execute.DatasetID, key flux.GroupKey) error

func (*ToHTTPTransformation) UpdateProcessingTime

func (t *ToHTTPTransformation) UpdateProcessingTime(id execute.DatasetID, pt execute.Time) error

func (*ToHTTPTransformation) UpdateWatermark

func (t *ToHTTPTransformation) UpdateWatermark(id execute.DatasetID, pt execute.Time) error

type ToKafkaOpSpec

type ToKafkaOpSpec struct {
	Brokers      []string `json:"brokers"`
	Topic        string   `json:"topic"`
	Balancer     string   `json:"balancer"`
	Name         string   `json:"name"`
	NameColumn   string   `json:"nameColumn"` // either name or name_column must be set; if neither is set, the "_measurement" column is used.
	TimeColumn   string   `json:"timeColumn"`
	TagColumns   []string `json:"tagColumns"`
	ValueColumns []string `json:"valueColumns"`
	MsgBufSize   int      `json:"msgBufferSize"` // the maximum number of messages to buffer before sending to Kafka; the underlying library defaults to 100
}
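
As a rough usage sketch, the spec can also be filled in directly from Go; the import path, broker address, topic, and balancer value below are illustrative assumptions.

package main

import (
	"fmt"

	"github.com/influxdata/flux/functions/outputs" // assumed import path
)

func main() {
	spec := outputs.ToKafkaOpSpec{
		Brokers:      []string{"localhost:9092"}, // illustrative broker address
		Topic:        "metrics",
		Balancer:     "hash", // hypothetical balancer name
		Name:         "cpu",
		TimeColumn:   "_time",            // commonly the default time column label
		ValueColumns: []string{"_value"}, // commonly the default value column label
		MsgBufSize:   100,                // matches the underlying library's default
	}
	fmt.Printf("toKafka -> topic %q on brokers %v\n", spec.Topic, spec.Brokers)
}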

func (ToKafkaOpSpec) Kind

func (*ToKafkaOpSpec) ReadArgs

func (o *ToKafkaOpSpec) ReadArgs(args flux.Arguments) error

ReadArgs loads a flux.Arguments into a ToKafkaOpSpec and sets several defaults. If time_column isn't set, it defaults to execute.TimeColLabel. If value_column isn't set, it defaults to []string{execute.DefaultValueColLabel}.

type ToKafkaProcedureSpec

type ToKafkaProcedureSpec struct {
	Spec *ToKafkaOpSpec
	// contains filtered or unexported fields
}

func (*ToKafkaProcedureSpec) Copy

func (*ToKafkaProcedureSpec) Kind

type ToKafkaTransformation

type ToKafkaTransformation struct {
	// contains filtered or unexported fields
}

func (*ToKafkaTransformation) Finish

func (t *ToKafkaTransformation) Finish(id execute.DatasetID, err error)

func (*ToKafkaTransformation) Process

func (t *ToKafkaTransformation) Process(id execute.DatasetID, tbl flux.Table) (err error)

func (*ToKafkaTransformation) RetractTable

func (t *ToKafkaTransformation) RetractTable(id execute.DatasetID, key flux.GroupKey) error

func (*ToKafkaTransformation) UpdateProcessingTime

func (t *ToKafkaTransformation) UpdateProcessingTime(id execute.DatasetID, pt execute.Time) error

func (*ToKafkaTransformation) UpdateWatermark

func (t *ToKafkaTransformation) UpdateWatermark(id execute.DatasetID, pt execute.Time) error
