Documentation ¶
Index ¶
- func NewApiEventSource_Override(a ApiEventSource, method *string, path *string, ...)
- func NewDynamoEventSource_Override(d DynamoEventSource, table awsdynamodb.ITable, props *DynamoEventSourceProps)
- func NewKinesisEventSource_Override(k KinesisEventSource, stream awskinesis.IStream, ...)
- func NewManagedKafkaEventSource_Override(m ManagedKafkaEventSource, props *ManagedKafkaEventSourceProps)
- func NewS3EventSource_Override(s S3EventSource, bucket awss3.Bucket, props *S3EventSourceProps)
- func NewSelfManagedKafkaEventSource_Override(s SelfManagedKafkaEventSource, props *SelfManagedKafkaEventSourceProps)
- func NewSnsDlq_Override(s SnsDlq, topic awssns.ITopic)
- func NewSnsEventSource_Override(s SnsEventSource, topic awssns.ITopic, props *SnsEventSourceProps)
- func NewSqsDlq_Override(s SqsDlq, queue awssqs.IQueue)
- func NewSqsEventSource_Override(s SqsEventSource, queue awssqs.IQueue, props *SqsEventSourceProps)
- func NewStreamEventSource_Override(s StreamEventSource, props *StreamEventSourceProps)
- type ApiEventSource
- type AuthenticationMethod
- type DynamoEventSource
- type DynamoEventSourceProps
- type KafkaEventSourceProps
- type KinesisEventSource
- type KinesisEventSourceProps
- type ManagedKafkaEventSource
- type ManagedKafkaEventSourceProps
- type S3EventSource
- type S3EventSourceProps
- type SelfManagedKafkaEventSource
- type SelfManagedKafkaEventSourceProps
- type SnsDlq
- type SnsEventSource
- type SnsEventSourceProps
- type SqsDlq
- type SqsEventSource
- type SqsEventSourceProps
- type StreamEventSource
- type StreamEventSourceProps
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func NewApiEventSource_Override ¶
func NewApiEventSource_Override(a ApiEventSource, method *string, path *string, options *awsapigateway.MethodOptions)
Experimental.
func NewDynamoEventSource_Override ¶
func NewDynamoEventSource_Override(d DynamoEventSource, table awsdynamodb.ITable, props *DynamoEventSourceProps)
Experimental.
func NewKinesisEventSource_Override ¶
func NewKinesisEventSource_Override(k KinesisEventSource, stream awskinesis.IStream, props *KinesisEventSourceProps)
Experimental.
func NewManagedKafkaEventSource_Override ¶
func NewManagedKafkaEventSource_Override(m ManagedKafkaEventSource, props *ManagedKafkaEventSourceProps)
Experimental.
func NewS3EventSource_Override ¶
func NewS3EventSource_Override(s S3EventSource, bucket awss3.Bucket, props *S3EventSourceProps)
Experimental.
func NewSelfManagedKafkaEventSource_Override ¶
func NewSelfManagedKafkaEventSource_Override(s SelfManagedKafkaEventSource, props *SelfManagedKafkaEventSourceProps)
Experimental.
func NewSnsEventSource_Override ¶
func NewSnsEventSource_Override(s SnsEventSource, topic awssns.ITopic, props *SnsEventSourceProps)
Experimental.
func NewSqsEventSource_Override ¶
func NewSqsEventSource_Override(s SqsEventSource, queue awssqs.IQueue, props *SqsEventSourceProps)
Experimental.
func NewStreamEventSource_Override ¶
func NewStreamEventSource_Override(s StreamEventSource, props *StreamEventSourceProps)
Experimental.
Types ¶
type ApiEventSource ¶
type ApiEventSource interface {
awslambda.IEventSource
Bind(target awslambda.IFunction)
}
Experimental.
func NewApiEventSource ¶
func NewApiEventSource(method *string, path *string, options *awsapigateway.MethodOptions) ApiEventSource
Experimental.
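A minimal sketch of wiring an ApiEventSource to a handler; fn is assumed to be an awslambda.Function already defined in the stack, and the method, path, and options are illustrative.

// fn is assumed to be an existing awslambda.Function; the route and options are illustrative.
source := awslambdaeventsources.NewApiEventSource(jsii.String("GET"), jsii.String("/items"), &awsapigateway.MethodOptions{
	ApiKeyRequired: jsii.Bool(true),
})
fn.AddEventSource(source)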
type AuthenticationMethod ¶
type AuthenticationMethod string
The authentication method to use with SelfManagedKafkaEventSource. Experimental.
const (
	AuthenticationMethod_SASL_SCRAM_512_AUTH AuthenticationMethod = "SASL_SCRAM_512_AUTH"
	AuthenticationMethod_SASL_SCRAM_256_AUTH AuthenticationMethod = "SASL_SCRAM_256_AUTH"
)
type DynamoEventSource ¶
type DynamoEventSource interface {
StreamEventSource
EventSourceMappingId() *string
Props() *StreamEventSourceProps
Bind(target awslambda.IFunction)
EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use an Amazon DynamoDB stream as an event source for AWS Lambda. Experimental.
func NewDynamoEventSource ¶
func NewDynamoEventSource(table awsdynamodb.ITable, props *DynamoEventSourceProps) DynamoEventSource
Experimental.
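A hedged sketch of consuming a DynamoDB stream; stack and fn are assumed to be an existing awscdk.Stack and awslambda.Function, and the table definition and values are illustrative.

// A stream must be enabled on the table for it to be used as an event source.
table := awsdynamodb.NewTable(stack, jsii.String("Table"), &awsdynamodb.TableProps{
	PartitionKey: &awsdynamodb.Attribute{Name: jsii.String("id"), Type: awsdynamodb.AttributeType_STRING},
	Stream:       awsdynamodb.StreamViewType_NEW_IMAGE,
})
fn.AddEventSource(awslambdaeventsources.NewDynamoEventSource(table, &awslambdaeventsources.DynamoEventSourceProps{
	StartingPosition:   awslambda.StartingPosition_TRIM_HORIZON,
	BatchSize:          jsii.Number(100),
	BisectBatchOnError: jsii.Bool(true),
	RetryAttempts:      jsii.Number(10),
}))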
type DynamoEventSourceProps ¶
type DynamoEventSourceProps struct {
// Where to begin consuming the stream.
// Experimental.
StartingPosition awslambda.StartingPosition `json:"startingPosition"`
// The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
//
// Your function receives an
// event with all the retrieved records.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of:
// * 1000 for {@link DynamoEventSource}
// * 10000 for {@link KinesisEventSource}
// Experimental.
BatchSize *float64 `json:"batchSize"`
// If the function returns an error, split the batch in two and retry.
// Experimental.
BisectBatchOnError *bool `json:"bisectBatchOnError"`
// If the stream event source mapping should be enabled.
// Experimental.
Enabled *bool `json:"enabled"`
// The maximum amount of time to gather records before invoking the function.
//
// Maximum of Duration.minutes(5)
// Experimental.
MaxBatchingWindow awscdk.Duration `json:"maxBatchingWindow"`
// The maximum age of a record that Lambda sends to a function for processing.
//
// Valid Range:
// * Minimum value of 60 seconds
// * Maximum value of 7 days
// Experimental.
MaxRecordAge awscdk.Duration `json:"maxRecordAge"`
// An Amazon SQS queue or Amazon SNS topic destination for discarded records.
// Experimental.
OnFailure awslambda.IEventSourceDlq `json:"onFailure"`
// The number of batches to process from each shard concurrently.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of 10
// Experimental.
ParallelizationFactor *float64 `json:"parallelizationFactor"`
// Maximum number of retry attempts.
//
// Valid Range:
// * Minimum value of 0
// * Maximum value of 10000
// Experimental.
RetryAttempts *float64 `json:"retryAttempts"`
// The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
//
// Valid Range: 0 - 15 minutes.
// Experimental.
TumblingWindow awscdk.Duration `json:"tumblingWindow"`
}
Experimental.
type KafkaEventSourceProps ¶
type KafkaEventSourceProps struct {
// Where to begin consuming the stream.
// Experimental.
StartingPosition awslambda.StartingPosition `json:"startingPosition"`
// The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
//
// Your function receives an
// event with all the retrieved records.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of:
// * 1000 for {@link DynamoEventSource}
// * 10000 for {@link KinesisEventSource}
// Experimental.
BatchSize *float64 `json:"batchSize"`
// If the function returns an error, split the batch in two and retry.
// Experimental.
BisectBatchOnError *bool `json:"bisectBatchOnError"`
// If the stream event source mapping should be enabled.
// Experimental.
Enabled *bool `json:"enabled"`
// The maximum amount of time to gather records before invoking the function.
//
// Maximum of Duration.minutes(5)
// Experimental.
MaxBatchingWindow awscdk.Duration `json:"maxBatchingWindow"`
// The maximum age of a record that Lambda sends to a function for processing.
//
// Valid Range:
// * Minimum value of 60 seconds
// * Maximum value of 7 days
// Experimental.
MaxRecordAge awscdk.Duration `json:"maxRecordAge"`
// An Amazon SQS queue or Amazon SNS topic destination for discarded records.
// Experimental.
OnFailure awslambda.IEventSourceDlq `json:"onFailure"`
// The number of batches to process from each shard concurrently.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of 10
// Experimental.
ParallelizationFactor *float64 `json:"parallelizationFactor"`
// Maximum number of retry attempts.
//
// Valid Range:
// * Minimum value of 0
// * Maximum value of 10000
// Experimental.
RetryAttempts *float64 `json:"retryAttempts"`
// The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
//
// Valid Range: 0 - 15 minutes.
// Experimental.
TumblingWindow awscdk.Duration `json:"tumblingWindow"`
// The secret with the Kafka credentials; see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details.
// Experimental.
Secret awssecretsmanager.ISecret `json:"secret"`
// The Kafka topic to subscribe to.
// Experimental.
Topic *string `json:"topic"`
}
Properties for a Kafka event source. Experimental.
type KinesisEventSource ¶
type KinesisEventSource interface {
StreamEventSource
EventSourceMappingId() *string
Props() *StreamEventSourceProps
Stream() awskinesis.IStream
Bind(target awslambda.IFunction)
EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use an Amazon Kinesis stream as an event source for AWS Lambda. Experimental.
func NewKinesisEventSource ¶
func NewKinesisEventSource(stream awskinesis.IStream, props *KinesisEventSourceProps) KinesisEventSource
Experimental.
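A hedged sketch of consuming a Kinesis stream; stack and fn are assumed to be an existing awscdk.Stack and awslambda.Function, and the stream and values are illustrative.

stream := awskinesis.NewStream(stack, jsii.String("Stream"), nil)
fn.AddEventSource(awslambdaeventsources.NewKinesisEventSource(stream, &awslambdaeventsources.KinesisEventSourceProps{
	StartingPosition:  awslambda.StartingPosition_LATEST,
	BatchSize:         jsii.Number(100),
	MaxBatchingWindow: awscdk.Duration_Minutes(jsii.Number(1)),
}))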
type KinesisEventSourceProps ¶
type KinesisEventSourceProps struct {
// Where to begin consuming the stream.
// Experimental.
StartingPosition awslambda.StartingPosition `json:"startingPosition"`
// The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
//
// Your function receives an
// event with all the retrieved records.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of:
// * 1000 for {@link DynamoEventSource}
// * 10000 for {@link KinesisEventSource}
// Experimental.
BatchSize *float64 `json:"batchSize"`
// If the function returns an error, split the batch in two and retry.
// Experimental.
BisectBatchOnError *bool `json:"bisectBatchOnError"`
// If the stream event source mapping should be enabled.
// Experimental.
Enabled *bool `json:"enabled"`
// The maximum amount of time to gather records before invoking the function.
//
// Maximum of Duration.minutes(5)
// Experimental.
MaxBatchingWindow awscdk.Duration `json:"maxBatchingWindow"`
// The maximum age of a record that Lambda sends to a function for processing.
//
// Valid Range:
// * Minimum value of 60 seconds
// * Maximum value of 7 days
// Experimental.
MaxRecordAge awscdk.Duration `json:"maxRecordAge"`
// An Amazon SQS queue or Amazon SNS topic destination for discarded records.
// Experimental.
OnFailure awslambda.IEventSourceDlq `json:"onFailure"`
// The number of batches to process from each shard concurrently.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of 10
// Experimental.
ParallelizationFactor *float64 `json:"parallelizationFactor"`
// Maximum number of retry attempts.
//
// Valid Range:
// * Minimum value of 0
// * Maximum value of 10000
// Experimental.
RetryAttempts *float64 `json:"retryAttempts"`
// The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
//
// Valid Range: 0 - 15 minutes.
// Experimental.
TumblingWindow awscdk.Duration `json:"tumblingWindow"`
}
Experimental.
type ManagedKafkaEventSource ¶
type ManagedKafkaEventSource interface {
StreamEventSource
Props() *StreamEventSourceProps
Bind(target awslambda.IFunction)
EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use an MSK cluster as a streaming source for AWS Lambda. Experimental.
func NewManagedKafkaEventSource ¶
func NewManagedKafkaEventSource(props *ManagedKafkaEventSourceProps) ManagedKafkaEventSource
Experimental.
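A hedged sketch of consuming an MSK topic; fn is assumed to be an existing awslambda.Function, and the cluster ARN, topic name, and secret are illustrative placeholders.

// The cluster ARN, topic, and secret below are placeholders, not real resources.
var secret awssecretsmanager.ISecret // e.g. an existing or imported Secrets Manager secret with the Kafka credentials
fn.AddEventSource(awslambdaeventsources.NewManagedKafkaEventSource(&awslambdaeventsources.ManagedKafkaEventSourceProps{
	ClusterArn:       jsii.String("arn:aws:kafka:us-east-1:111111111111:cluster/my-cluster/abcdefg"),
	Topic:            jsii.String("some-topic"),
	Secret:           secret,
	StartingPosition: awslambda.StartingPosition_TRIM_HORIZON,
	BatchSize:        jsii.Number(100),
}))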
type ManagedKafkaEventSourceProps ¶
type ManagedKafkaEventSourceProps struct {
// Where to begin consuming the stream.
// Experimental.
StartingPosition awslambda.StartingPosition `json:"startingPosition"`
// The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
//
// Your function receives an
// event with all the retrieved records.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of:
// * 1000 for {@link DynamoEventSource}
// * 10000 for {@link KinesisEventSource}
// Experimental.
BatchSize *float64 `json:"batchSize"`
// If the function returns an error, split the batch in two and retry.
// Experimental.
BisectBatchOnError *bool `json:"bisectBatchOnError"`
// If the stream event source mapping should be enabled.
// Experimental.
Enabled *bool `json:"enabled"`
// The maximum amount of time to gather records before invoking the function.
//
// Maximum of Duration.minutes(5)
// Experimental.
MaxBatchingWindow awscdk.Duration `json:"maxBatchingWindow"`
// The maximum age of a record that Lambda sends to a function for processing.
//
// Valid Range:
// * Minimum value of 60 seconds
// * Maximum value of 7 days
// Experimental.
MaxRecordAge awscdk.Duration `json:"maxRecordAge"`
// An Amazon SQS queue or Amazon SNS topic destination for discarded records.
// Experimental.
OnFailure awslambda.IEventSourceDlq `json:"onFailure"`
// The number of batches to process from each shard concurrently.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of 10
// Experimental.
ParallelizationFactor *float64 `json:"parallelizationFactor"`
// Maximum number of retry attempts.
//
// Valid Range:
// * Minimum value of 0
// * Maximum value of 10000
// Experimental.
RetryAttempts *float64 `json:"retryAttempts"`
// The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
//
// Valid Range: 0 - 15 minutes.
// Experimental.
TumblingWindow awscdk.Duration `json:"tumblingWindow"`
// The secret with the Kafka credentials; see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details.
// Experimental.
Secret awssecretsmanager.ISecret `json:"secret"`
// The Kafka topic to subscribe to.
// Experimental.
Topic *string `json:"topic"`
// The ARN of the MSK cluster.
// Experimental.
ClusterArn *string `json:"clusterArn"`
}
Properties for an MSK event source. Experimental.
type S3EventSource ¶
type S3EventSource interface {
awslambda.IEventSource
Bucket() awss3.Bucket
Bind(target awslambda.IFunction)
}
Use S3 bucket notifications as an event source for AWS Lambda. Experimental.
func NewS3EventSource ¶
func NewS3EventSource(bucket awss3.Bucket, props *S3EventSourceProps) S3EventSource
Experimental.
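A hedged sketch of reacting to S3 object creation; stack and fn are assumed to be an existing awscdk.Stack and awslambda.Function, and the bucket and key prefix are illustrative.

bucket := awss3.NewBucket(stack, jsii.String("Bucket"), nil)
fn.AddEventSource(awslambdaeventsources.NewS3EventSource(bucket, &awslambdaeventsources.S3EventSourceProps{
	Events:  &[]awss3.EventType{awss3.EventType_OBJECT_CREATED},
	Filters: &[]*awss3.NotificationKeyFilter{{Prefix: jsii.String("uploads/")}},
}))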
type S3EventSourceProps ¶
type S3EventSourceProps struct {
// The S3 event types that will trigger the notification.
// Experimental.
Events *[]awss3.EventType `json:"events"`
// S3 object key filter rules to determine which objects trigger this event.
//
// Each filter must include a `prefix` and/or `suffix` that will be matched
// against the S3 object key. Refer to the S3 Developer Guide for details
// about allowed filter rules.
// Experimental.
Filters *[]*awss3.NotificationKeyFilter `json:"filters"`
}
Experimental.
type SelfManagedKafkaEventSource ¶
type SelfManagedKafkaEventSource interface {
StreamEventSource
Props() *StreamEventSourceProps
Bind(target awslambda.IFunction)
EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use a self-hosted Kafka installation as a streaming source for AWS Lambda. Experimental.
func NewSelfManagedKafkaEventSource ¶
func NewSelfManagedKafkaEventSource(props *SelfManagedKafkaEventSourceProps) SelfManagedKafkaEventSource
Experimental.
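A hedged sketch of consuming a topic on a self-managed cluster; fn is assumed to be an existing awslambda.Function, and the broker addresses, topic name, and secret are illustrative placeholders.

// Broker addresses, topic, and secret below are placeholders, not real endpoints.
var secret awssecretsmanager.ISecret // SASL/SCRAM credentials for the cluster
fn.AddEventSource(awslambdaeventsources.NewSelfManagedKafkaEventSource(&awslambdaeventsources.SelfManagedKafkaEventSourceProps{
	BootstrapServers:     jsii.Strings("kafka-broker-1.example.com:9092", "kafka-broker-2.example.com:9092"),
	Topic:                jsii.String("some-topic"),
	Secret:               secret,
	AuthenticationMethod: awslambdaeventsources.AuthenticationMethod_SASL_SCRAM_512_AUTH,
	StartingPosition:     awslambda.StartingPosition_TRIM_HORIZON,
}))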
type SelfManagedKafkaEventSourceProps ¶
type SelfManagedKafkaEventSourceProps struct {
// Where to begin consuming the stream.
// Experimental.
StartingPosition awslambda.StartingPosition `json:"startingPosition"`
// The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
//
// Your function receives an
// event with all the retrieved records.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of:
// * 1000 for {@link DynamoEventSource}
// * 10000 for {@link KinesisEventSource}
// Experimental.
BatchSize *float64 `json:"batchSize"`
// If the function returns an error, split the batch in two and retry.
// Experimental.
BisectBatchOnError *bool `json:"bisectBatchOnError"`
// If the stream event source mapping should be enabled.
// Experimental.
Enabled *bool `json:"enabled"`
// The maximum amount of time to gather records before invoking the function.
//
// Maximum of Duration.minutes(5)
// Experimental.
MaxBatchingWindow awscdk.Duration `json:"maxBatchingWindow"`
// The maximum age of a record that Lambda sends to a function for processing.
//
// Valid Range:
// * Minimum value of 60 seconds
// * Maximum value of 7 days
// Experimental.
MaxRecordAge awscdk.Duration `json:"maxRecordAge"`
// An Amazon SQS queue or Amazon SNS topic destination for discarded records.
// Experimental.
OnFailure awslambda.IEventSourceDlq `json:"onFailure"`
// The number of batches to process from each shard concurrently.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of 10
// Experimental.
ParallelizationFactor *float64 `json:"parallelizationFactor"`
// Maximum number of retry attempts.
//
// Valid Range:
// * Minimum value of 0
// * Maximum value of 10000
// Experimental.
RetryAttempts *float64 `json:"retryAttempts"`
// The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
//
// Valid Range: 0 - 15 minutes.
// Experimental.
TumblingWindow awscdk.Duration `json:"tumblingWindow"`
// The secret with the Kafka credentials; see https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details.
// Experimental.
Secret awssecretsmanager.ISecret `json:"secret"`
// The Kafka topic to subscribe to.
// Experimental.
Topic *string `json:"topic"`
// The list of host and port pairs that are the addresses of the Kafka brokers in a "bootstrap" Kafka cluster that a Kafka client connects to initially to bootstrap itself.
//
// They are in the format `abc.xyz.com:xxxx`.
// Experimental.
BootstrapServers *[]*string `json:"bootstrapServers"`
// The authentication method for your Kafka cluster.
// Experimental.
AuthenticationMethod AuthenticationMethod `json:"authenticationMethod"`
// If your Kafka brokers are only reachable via VPC, provide the security group here.
// Experimental.
SecurityGroup awsec2.ISecurityGroup `json:"securityGroup"`
// If your Kafka brokers are only reachable via VPC, provide the VPC here.
// Experimental.
Vpc awsec2.IVpc `json:"vpc"`
// If your Kafka brokers are only reachable via VPC, provide the subnets selection here.
// Experimental.
VpcSubnets *awsec2.SubnetSelection `json:"vpcSubnets"`
}
Properties for a self-managed Kafka cluster event source.
If your Kafka cluster is only reachable via VPC, make sure to configure it. Experimental.
type SnsDlq ¶
type SnsDlq interface {
awslambda.IEventSourceDlq
Bind(_target awslambda.IEventSourceMapping, targetHandler awslambda.IFunction) *awslambda.DlqDestinationConfig
}
An SNS dead letter queue destination configuration for a Lambda event source. Experimental.
type SnsEventSource ¶
type SnsEventSource interface {
awslambda.IEventSource
Topic() awssns.ITopic
Bind(target awslambda.IFunction)
}
Use an Amazon SNS topic as an event source for AWS Lambda. Experimental.
func NewSnsEventSource ¶
func NewSnsEventSource(topic awssns.ITopic, props *SnsEventSourceProps) SnsEventSource
Experimental.
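A hedged sketch of subscribing a function to an SNS topic; stack and fn are assumed to be an existing awscdk.Stack and awslambda.Function, and the topic and queue are illustrative.

topic := awssns.NewTopic(stack, jsii.String("Topic"), nil)
deadLetterQueue := awssqs.NewQueue(stack, jsii.String("SubscriptionDLQ"), nil)
fn.AddEventSource(awslambdaeventsources.NewSnsEventSource(topic, &awslambdaeventsources.SnsEventSourceProps{
	DeadLetterQueue: deadLetterQueue,
}))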
type SnsEventSourceProps ¶
type SnsEventSourceProps struct {
// Queue to be used as dead letter queue.
//
// If not passed, no dead letter queue is enabled.
// Experimental.
DeadLetterQueue awssqs.IQueue `json:"deadLetterQueue"`
// The filter policy.
// Experimental.
FilterPolicy *map[string]awssns.SubscriptionFilter `json:"filterPolicy"`
}
Properties forwarded to the Lambda Subscription. Experimental.
type SqsDlq ¶
type SqsDlq interface {
awslambda.IEventSourceDlq
Bind(_target awslambda.IEventSourceMapping, targetHandler awslambda.IFunction) *awslambda.DlqDestinationConfig
}
An SQS dead letter queue destination configuration for a Lambda event source. Experimental.
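A hedged sketch of using an SqsDlq as the OnFailure destination of a stream event source; stack, fn, and table are assumed to be an existing awscdk.Stack, awslambda.Function, and stream-enabled awsdynamodb.ITable.

dlq := awssqs.NewQueue(stack, jsii.String("StreamDLQ"), nil)
fn.AddEventSource(awslambdaeventsources.NewDynamoEventSource(table, &awslambdaeventsources.DynamoEventSourceProps{
	StartingPosition: awslambda.StartingPosition_TRIM_HORIZON,
	OnFailure:        awslambdaeventsources.NewSqsDlq(dlq),
	RetryAttempts:    jsii.Number(3),
}))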
type SqsEventSource ¶
type SqsEventSource interface {
awslambda.IEventSource
EventSourceMappingId() *string
Queue() awssqs.IQueue
Bind(target awslambda.IFunction)
}
Use an Amazon SQS queue as an event source for AWS Lambda. Experimental.
func NewSqsEventSource ¶
func NewSqsEventSource(queue awssqs.IQueue, props *SqsEventSourceProps) SqsEventSource
Experimental.
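A hedged sketch of polling an SQS queue; stack and fn are assumed to be an existing awscdk.Stack and awslambda.Function, and the queue settings are illustrative.

queue := awssqs.NewQueue(stack, jsii.String("Queue"), &awssqs.QueueProps{
	VisibilityTimeout: awscdk.Duration_Seconds(jsii.Number(30)),
})
fn.AddEventSource(awslambdaeventsources.NewSqsEventSource(queue, &awslambdaeventsources.SqsEventSourceProps{
	BatchSize:         jsii.Number(10),
	MaxBatchingWindow: awscdk.Duration_Minutes(jsii.Number(1)),
}))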
type SqsEventSourceProps ¶
type SqsEventSourceProps struct {
// The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
//
// Your function receives an
// event with all the retrieved records.
//
// Valid Range: Minimum value of 1. Maximum value of 10.
// Experimental.
BatchSize *float64 `json:"batchSize"`
// If the SQS event source mapping should be enabled.
// Experimental.
Enabled *bool `json:"enabled"`
// The maximum amount of time to gather records before invoking the function.
//
// Valid Range: Minimum value of 0 minutes. Maximum value of 5 minutes.
// Experimental.
MaxBatchingWindow awscdk.Duration `json:"maxBatchingWindow"`
}
Experimental.
type StreamEventSource ¶
type StreamEventSource interface {
awslambda.IEventSource
Props() *StreamEventSourceProps
Bind(_target awslambda.IFunction)
EnrichMappingOptions(options *awslambda.EventSourceMappingOptions) *awslambda.EventSourceMappingOptions
}
Use a stream as an event source for AWS Lambda. Experimental.
type StreamEventSourceProps ¶
type StreamEventSourceProps struct {
// Where to begin consuming the stream.
// Experimental.
StartingPosition awslambda.StartingPosition `json:"startingPosition"`
// The largest number of records that AWS Lambda will retrieve from your event source at the time of invoking your function.
//
// Your function receives an
// event with all the retrieved records.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of:
// * 1000 for {@link DynamoEventSource}
// * 10000 for {@link KinesisEventSource}
// Experimental.
BatchSize *float64 `json:"batchSize"`
// If the function returns an error, split the batch in two and retry.
// Experimental.
BisectBatchOnError *bool `json:"bisectBatchOnError"`
// If the stream event source mapping should be enabled.
// Experimental.
Enabled *bool `json:"enabled"`
// The maximum amount of time to gather records before invoking the function.
//
// Maximum of Duration.minutes(5)
// Experimental.
MaxBatchingWindow awscdk.Duration `json:"maxBatchingWindow"`
// The maximum age of a record that Lambda sends to a function for processing.
//
// Valid Range:
// * Minimum value of 60 seconds
// * Maximum value of 7 days
// Experimental.
MaxRecordAge awscdk.Duration `json:"maxRecordAge"`
// An Amazon SQS queue or Amazon SNS topic destination for discarded records.
// Experimental.
OnFailure awslambda.IEventSourceDlq `json:"onFailure"`
// The number of batches to process from each shard concurrently.
//
// Valid Range:
// * Minimum value of 1
// * Maximum value of 10
// Experimental.
ParallelizationFactor *float64 `json:"parallelizationFactor"`
// Maximum number of retry attempts.
//
// Valid Range:
// * Minimum value of 0
// * Maximum value of 10000
// Experimental.
RetryAttempts *float64 `json:"retryAttempts"`
// The size of the tumbling windows to group records sent to DynamoDB or Kinesis.
//
// Valid Range: 0 - 15 minutes.
// Experimental.
TumblingWindow awscdk.Duration `json:"tumblingWindow"`
}
The set of properties for event sources that follow the streaming model, such as DynamoDB, Kinesis, and Kafka. Experimental.