1. Packages
  2. AWS Classic
  3. API Docs
  4. lambda
  5. EventSourceMapping

Try AWS Native preview for resources not in the classic version.

AWS Classic v6.3.0 published on Thursday, Sep 28, 2023 by Pulumi

aws.lambda.EventSourceMapping

Explore with Pulumi AI

aws logo

Try AWS Native preview for resources not in the classic version.

AWS Classic v6.3.0 published on Thursday, Sep 28, 2023 by Pulumi

    Provides a Lambda event source mapping. This allows Lambda functions to get events from Kinesis, DynamoDB, SQS, Amazon MQ, and Managed Streaming for Apache Kafka (MSK).

    For information about Lambda and how to use it, see the "What is AWS Lambda?" developer guide. For information about event source mappings, see the CreateEventSourceMapping operation in the AWS Lambda API Reference.

    Example Usage

    DynamoDB

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Lambda.EventSourceMapping("example", new()
        {
            EventSourceArn = aws_dynamodb_table.Example.Stream_arn,
            FunctionName = aws_lambda_function.Example.Arn,
            StartingPosition = "LATEST",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
    			EventSourceArn:   pulumi.Any(aws_dynamodb_table.Example.Stream_arn),
    			FunctionName:     pulumi.Any(aws_lambda_function.Example.Arn),
    			StartingPosition: pulumi.String("LATEST"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .eventSourceArn(aws_dynamodb_table.example().stream_arn())
                .functionName(aws_lambda_function.example().arn())
                .startingPosition("LATEST")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.lambda_.EventSourceMapping("example",
        event_source_arn=aws_dynamodb_table["example"]["stream_arn"],
        function_name=aws_lambda_function["example"]["arn"],
        starting_position="LATEST")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.lambda.EventSourceMapping("example", {
        eventSourceArn: aws_dynamodb_table.example.stream_arn,
        functionName: aws_lambda_function.example.arn,
        startingPosition: "LATEST",
    });
    
    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          eventSourceArn: ${aws_dynamodb_table.example.stream_arn}
          functionName: ${aws_lambda_function.example.arn}
          startingPosition: LATEST
    

    Kinesis

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Lambda.EventSourceMapping("example", new()
        {
            EventSourceArn = aws_kinesis_stream.Example.Arn,
            FunctionName = aws_lambda_function.Example.Arn,
            StartingPosition = "LATEST",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
    			EventSourceArn:   pulumi.Any(aws_kinesis_stream.Example.Arn),
    			FunctionName:     pulumi.Any(aws_lambda_function.Example.Arn),
    			StartingPosition: pulumi.String("LATEST"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .eventSourceArn(aws_kinesis_stream.example().arn())
                .functionName(aws_lambda_function.example().arn())
                .startingPosition("LATEST")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.lambda_.EventSourceMapping("example",
        event_source_arn=aws_kinesis_stream["example"]["arn"],
        function_name=aws_lambda_function["example"]["arn"],
        starting_position="LATEST")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.lambda.EventSourceMapping("example", {
        eventSourceArn: aws_kinesis_stream.example.arn,
        functionName: aws_lambda_function.example.arn,
        startingPosition: "LATEST",
    });
    
    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          eventSourceArn: ${aws_kinesis_stream.example.arn}
          functionName: ${aws_lambda_function.example.arn}
          startingPosition: LATEST
    

    Managed Streaming for Apache Kafka (MSK)

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Lambda.EventSourceMapping("example", new()
        {
            EventSourceArn = aws_msk_cluster.Example.Arn,
            FunctionName = aws_lambda_function.Example.Arn,
            Topics = new[]
            {
                "Example",
            },
            StartingPosition = "TRIM_HORIZON",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
    			EventSourceArn: pulumi.Any(aws_msk_cluster.Example.Arn),
    			FunctionName:   pulumi.Any(aws_lambda_function.Example.Arn),
    			Topics: pulumi.StringArray{
    				pulumi.String("Example"),
    			},
    			StartingPosition: pulumi.String("TRIM_HORIZON"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .eventSourceArn(aws_msk_cluster.example().arn())
                .functionName(aws_lambda_function.example().arn())
                .topics("Example")
                .startingPosition("TRIM_HORIZON")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.lambda_.EventSourceMapping("example",
        event_source_arn=aws_msk_cluster["example"]["arn"],
        function_name=aws_lambda_function["example"]["arn"],
        topics=["Example"],
        starting_position="TRIM_HORIZON")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.lambda.EventSourceMapping("example", {
        eventSourceArn: aws_msk_cluster.example.arn,
        functionName: aws_lambda_function.example.arn,
        topics: ["Example"],
        startingPosition: "TRIM_HORIZON",
    });
    
    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          eventSourceArn: ${aws_msk_cluster.example.arn}
          functionName: ${aws_lambda_function.example.arn}
          topics:
            - Example
          startingPosition: TRIM_HORIZON
    

    Self Managed Apache Kafka

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Lambda.EventSourceMapping("example", new()
        {
            FunctionName = aws_lambda_function.Example.Arn,
            Topics = new[]
            {
                "Example",
            },
            StartingPosition = "TRIM_HORIZON",
            SelfManagedEventSource = new Aws.Lambda.Inputs.EventSourceMappingSelfManagedEventSourceArgs
            {
                Endpoints = 
                {
                    { "KAFKA_BOOTSTRAP_SERVERS", "kafka1.example.com:9092,kafka2.example.com:9092" },
                },
            },
            SourceAccessConfigurations = new[]
            {
                new Aws.Lambda.Inputs.EventSourceMappingSourceAccessConfigurationArgs
                {
                    Type = "VPC_SUBNET",
                    Uri = "subnet:subnet-example1",
                },
                new Aws.Lambda.Inputs.EventSourceMappingSourceAccessConfigurationArgs
                {
                    Type = "VPC_SUBNET",
                    Uri = "subnet:subnet-example2",
                },
                new Aws.Lambda.Inputs.EventSourceMappingSourceAccessConfigurationArgs
                {
                    Type = "VPC_SECURITY_GROUP",
                    Uri = "security_group:sg-example",
                },
            },
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
    			FunctionName: pulumi.Any(aws_lambda_function.Example.Arn),
    			Topics: pulumi.StringArray{
    				pulumi.String("Example"),
    			},
    			StartingPosition: pulumi.String("TRIM_HORIZON"),
    			SelfManagedEventSource: &lambda.EventSourceMappingSelfManagedEventSourceArgs{
    				Endpoints: pulumi.StringMap{
    					"KAFKA_BOOTSTRAP_SERVERS": pulumi.String("kafka1.example.com:9092,kafka2.example.com:9092"),
    				},
    			},
    			SourceAccessConfigurations: lambda.EventSourceMappingSourceAccessConfigurationArray{
    				&lambda.EventSourceMappingSourceAccessConfigurationArgs{
    					Type: pulumi.String("VPC_SUBNET"),
    					Uri:  pulumi.String("subnet:subnet-example1"),
    				},
    				&lambda.EventSourceMappingSourceAccessConfigurationArgs{
    					Type: pulumi.String("VPC_SUBNET"),
    					Uri:  pulumi.String("subnet:subnet-example2"),
    				},
    				&lambda.EventSourceMappingSourceAccessConfigurationArgs{
    					Type: pulumi.String("VPC_SECURITY_GROUP"),
    					Uri:  pulumi.String("security_group:sg-example"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import com.pulumi.aws.lambda.inputs.EventSourceMappingSelfManagedEventSourceArgs;
    import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .functionName(aws_lambda_function.example().arn())
                .topics("Example")
                .startingPosition("TRIM_HORIZON")
                .selfManagedEventSource(EventSourceMappingSelfManagedEventSourceArgs.builder()
                    .endpoints(Map.of("KAFKA_BOOTSTRAP_SERVERS", "kafka1.example.com:9092,kafka2.example.com:9092"))
                    .build())
                .sourceAccessConfigurations(            
                    EventSourceMappingSourceAccessConfigurationArgs.builder()
                        .type("VPC_SUBNET")
                        .uri("subnet:subnet-example1")
                        .build(),
                    EventSourceMappingSourceAccessConfigurationArgs.builder()
                        .type("VPC_SUBNET")
                        .uri("subnet:subnet-example2")
                        .build(),
                    EventSourceMappingSourceAccessConfigurationArgs.builder()
                        .type("VPC_SECURITY_GROUP")
                        .uri("security_group:sg-example")
                        .build())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.lambda_.EventSourceMapping("example",
        function_name=aws_lambda_function["example"]["arn"],
        topics=["Example"],
        starting_position="TRIM_HORIZON",
        self_managed_event_source=aws.lambda_.EventSourceMappingSelfManagedEventSourceArgs(
            endpoints={
                "KAFKA_BOOTSTRAP_SERVERS": "kafka1.example.com:9092,kafka2.example.com:9092",
            },
        ),
        source_access_configurations=[
            aws.lambda_.EventSourceMappingSourceAccessConfigurationArgs(
                type="VPC_SUBNET",
                uri="subnet:subnet-example1",
            ),
            aws.lambda_.EventSourceMappingSourceAccessConfigurationArgs(
                type="VPC_SUBNET",
                uri="subnet:subnet-example2",
            ),
            aws.lambda_.EventSourceMappingSourceAccessConfigurationArgs(
                type="VPC_SECURITY_GROUP",
                uri="security_group:sg-example",
            ),
        ])
    
    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.lambda.EventSourceMapping("example", {
        functionName: aws_lambda_function.example.arn,
        topics: ["Example"],
        startingPosition: "TRIM_HORIZON",
        selfManagedEventSource: {
            endpoints: {
                KAFKA_BOOTSTRAP_SERVERS: "kafka1.example.com:9092,kafka2.example.com:9092",
            },
        },
        sourceAccessConfigurations: [
            {
                type: "VPC_SUBNET",
                uri: "subnet:subnet-example1",
            },
            {
                type: "VPC_SUBNET",
                uri: "subnet:subnet-example2",
            },
            {
                type: "VPC_SECURITY_GROUP",
                uri: "security_group:sg-example",
            },
        ],
    });
    
    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          functionName: ${aws_lambda_function.example.arn}
          topics:
            - Example
          startingPosition: TRIM_HORIZON
          selfManagedEventSource:
            endpoints:
              KAFKA_BOOTSTRAP_SERVERS: kafka1.example.com:9092,kafka2.example.com:9092
          sourceAccessConfigurations:
            - type: VPC_SUBNET
              uri: subnet:subnet-example1
            - type: VPC_SUBNET
              uri: subnet:subnet-example2
            - type: VPC_SECURITY_GROUP
              uri: security_group:sg-example
    

    SQS

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Lambda.EventSourceMapping("example", new()
        {
            EventSourceArn = aws_sqs_queue.Sqs_queue_test.Arn,
            FunctionName = aws_lambda_function.Example.Arn,
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
    			EventSourceArn: pulumi.Any(aws_sqs_queue.Sqs_queue_test.Arn),
    			FunctionName:   pulumi.Any(aws_lambda_function.Example.Arn),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .eventSourceArn(aws_sqs_queue.sqs_queue_test().arn())
                .functionName(aws_lambda_function.example().arn())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.lambda_.EventSourceMapping("example",
        event_source_arn=aws_sqs_queue["sqs_queue_test"]["arn"],
        function_name=aws_lambda_function["example"]["arn"])
    
    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.lambda.EventSourceMapping("example", {
        eventSourceArn: aws_sqs_queue.sqs_queue_test.arn,
        functionName: aws_lambda_function.example.arn,
    });
    
    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          eventSourceArn: ${aws_sqs_queue.sqs_queue_test.arn}
          functionName: ${aws_lambda_function.example.arn}
    

    SQS with event filter

    using System.Collections.Generic;
    using System.Linq;
    using System.Text.Json;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Lambda.EventSourceMapping("example", new()
        {
            EventSourceArn = aws_sqs_queue.Sqs_queue_test.Arn,
            FunctionName = aws_lambda_function.Example.Arn,
            FilterCriteria = new Aws.Lambda.Inputs.EventSourceMappingFilterCriteriaArgs
            {
                Filters = new[]
                {
                    new Aws.Lambda.Inputs.EventSourceMappingFilterCriteriaFilterArgs
                    {
                        Pattern = JsonSerializer.Serialize(new Dictionary<string, object?>
                        {
                            ["body"] = new Dictionary<string, object?>
                            {
                                ["Temperature"] = new[]
                                {
                                    new Dictionary<string, object?>
                                    {
                                        ["numeric"] = new[]
                                        {
                                            ">",
                                            0,
                                            "<=",
                                            100,
                                        },
                                    },
                                },
                                ["Location"] = new[]
                                {
                                    "New York",
                                },
                            },
                        }),
                    },
                },
            },
        });
    
    });
    
    package main
    
    import (
    	"encoding/json"
    
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		tmpJSON0, err := json.Marshal(map[string]interface{}{
    			"body": map[string]interface{}{
    				"Temperature": []map[string]interface{}{
    					map[string]interface{}{
    						"numeric": []interface{}{
    							">",
    							0,
    							"<=",
    							100,
    						},
    					},
    				},
    				"Location": []string{
    					"New York",
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		json0 := string(tmpJSON0)
    		_, err = lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
    			EventSourceArn: pulumi.Any(aws_sqs_queue.Sqs_queue_test.Arn),
    			FunctionName:   pulumi.Any(aws_lambda_function.Example.Arn),
    			FilterCriteria: &lambda.EventSourceMappingFilterCriteriaArgs{
    				Filters: lambda.EventSourceMappingFilterCriteriaFilterArray{
    					&lambda.EventSourceMappingFilterCriteriaFilterArgs{
    						Pattern: pulumi.String(json0),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import com.pulumi.aws.lambda.inputs.EventSourceMappingFilterCriteriaArgs;
    import static com.pulumi.codegen.internal.Serialization.*;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .eventSourceArn(aws_sqs_queue.sqs_queue_test().arn())
                .functionName(aws_lambda_function.example().arn())
                .filterCriteria(EventSourceMappingFilterCriteriaArgs.builder()
                    .filters(EventSourceMappingFilterCriteriaFilterArgs.builder()
                        .pattern(serializeJson(
                            jsonObject(
                                jsonProperty("body", jsonObject(
                                    jsonProperty("Temperature", jsonArray(jsonObject(
                                        jsonProperty("numeric", jsonArray(
                                            ">", 
                                            0, 
                                            "<=", 
                                            100
                                        ))
                                    ))),
                                    jsonProperty("Location", jsonArray("New York"))
                                ))
                            )))
                        .build())
                    .build())
                .build());
    
        }
    }
    
    import pulumi
    import json
    import pulumi_aws as aws
    
    example = aws.lambda_.EventSourceMapping("example",
        event_source_arn=aws_sqs_queue["sqs_queue_test"]["arn"],
        function_name=aws_lambda_function["example"]["arn"],
        filter_criteria=aws.lambda_.EventSourceMappingFilterCriteriaArgs(
            filters=[aws.lambda_.EventSourceMappingFilterCriteriaFilterArgs(
                pattern=json.dumps({
                    "body": {
                        "Temperature": [{
                            "numeric": [
                                ">",
                                0,
                                "<=",
                                100,
                            ],
                        }],
                        "Location": ["New York"],
                    },
                }),
            )],
        ))
    
    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.lambda.EventSourceMapping("example", {
        eventSourceArn: aws_sqs_queue.sqs_queue_test.arn,
        functionName: aws_lambda_function.example.arn,
        filterCriteria: {
            filters: [{
                pattern: JSON.stringify({
                    body: {
                        Temperature: [{
                            numeric: [
                                ">",
                                0,
                                "<=",
                                100,
                            ],
                        }],
                        Location: ["New York"],
                    },
                }),
            }],
        },
    });
    
    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          eventSourceArn: ${aws_sqs_queue.sqs_queue_test.arn}
          functionName: ${aws_lambda_function.example.arn}
          filterCriteria:
            filters:
              - pattern:
                  fn::toJSON:
                    body:
                      Temperature:
                        - numeric:
                            - '>'
                            - 0
                            - <=
                            - 100
                      Location:
                        - New York
    

    Amazon MQ (ActiveMQ)

    Coming soon!

    Coming soon!

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .batchSize(10)
                .eventSourceArn(aws_mq_broker.example().arn())
                .enabled(true)
                .functionName(aws_lambda_function.example().arn())
                .queues("example")
                .sourceAccessConfigurations(EventSourceMappingSourceAccessConfigurationArgs.builder()
                    .type("BASIC_AUTH")
                    .uri(aws_secretsmanager_secret_version.example().arn())
                    .build())
                .build());
    
        }
    }
    

    Coming soon!

    Coming soon!

    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          batchSize: 10
          eventSourceArn: ${aws_mq_broker.example.arn}
          enabled: true
          functionName: ${aws_lambda_function.example.arn}
          queues:
            - example
          sourceAccessConfigurations:
            - type: BASIC_AUTH
              uri: ${aws_secretsmanager_secret_version.example.arn}
    

    Amazon MQ (RabbitMQ)

    Coming soon!

    Coming soon!

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.lambda.EventSourceMapping;
    import com.pulumi.aws.lambda.EventSourceMappingArgs;
    import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()        
                .batchSize(1)
                .eventSourceArn(aws_mq_broker.example().arn())
                .enabled(true)
                .functionName(aws_lambda_function.example().arn())
                .queues("example")
                .sourceAccessConfigurations(            
                    EventSourceMappingSourceAccessConfigurationArgs.builder()
                        .type("VIRTUAL_HOST")
                        .uri("/example")
                        .build(),
                    EventSourceMappingSourceAccessConfigurationArgs.builder()
                        .type("BASIC_AUTH")
                        .uri(aws_secretsmanager_secret_version.example().arn())
                        .build())
                .build());
    
        }
    }
    

    Coming soon!

    Coming soon!

    resources:
      example:
        type: aws:lambda:EventSourceMapping
        properties:
          batchSize: 1
          eventSourceArn: ${aws_mq_broker.example.arn}
          enabled: true
          functionName: ${aws_lambda_function.example.arn}
          queues:
            - example
          sourceAccessConfigurations:
            - type: VIRTUAL_HOST
              uri: /example
            - type: BASIC_AUTH
              uri: ${aws_secretsmanager_secret_version.example.arn}
    

    Create EventSourceMapping Resource

    new EventSourceMapping(name: string, args: EventSourceMappingArgs, opts?: CustomResourceOptions);
    @overload
    def EventSourceMapping(resource_name: str,
                           opts: Optional[ResourceOptions] = None,
                           amazon_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs] = None,
                           batch_size: Optional[int] = None,
                           bisect_batch_on_function_error: Optional[bool] = None,
                           destination_config: Optional[_lambda_.EventSourceMappingDestinationConfigArgs] = None,
                           document_db_event_source_config: Optional[_lambda_.EventSourceMappingDocumentDbEventSourceConfigArgs] = None,
                           enabled: Optional[bool] = None,
                           event_source_arn: Optional[str] = None,
                           filter_criteria: Optional[_lambda_.EventSourceMappingFilterCriteriaArgs] = None,
                           function_name: Optional[str] = None,
                           function_response_types: Optional[Sequence[str]] = None,
                           maximum_batching_window_in_seconds: Optional[int] = None,
                           maximum_record_age_in_seconds: Optional[int] = None,
                           maximum_retry_attempts: Optional[int] = None,
                           parallelization_factor: Optional[int] = None,
                           queues: Optional[str] = None,
                           scaling_config: Optional[_lambda_.EventSourceMappingScalingConfigArgs] = None,
                           self_managed_event_source: Optional[_lambda_.EventSourceMappingSelfManagedEventSourceArgs] = None,
                           self_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs] = None,
                           source_access_configurations: Optional[Sequence[_lambda_.EventSourceMappingSourceAccessConfigurationArgs]] = None,
                           starting_position: Optional[str] = None,
                           starting_position_timestamp: Optional[str] = None,
                           topics: Optional[Sequence[str]] = None,
                           tumbling_window_in_seconds: Optional[int] = None)
    @overload
    def EventSourceMapping(resource_name: str,
                           args: EventSourceMappingArgs,
                           opts: Optional[ResourceOptions] = None)
    func NewEventSourceMapping(ctx *Context, name string, args EventSourceMappingArgs, opts ...ResourceOption) (*EventSourceMapping, error)
    public EventSourceMapping(string name, EventSourceMappingArgs args, CustomResourceOptions? opts = null)
    public EventSourceMapping(String name, EventSourceMappingArgs args)
    public EventSourceMapping(String name, EventSourceMappingArgs args, CustomResourceOptions options)
    
    type: aws:lambda:EventSourceMapping
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args EventSourceMappingArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args EventSourceMappingArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args EventSourceMappingArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args EventSourceMappingArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args EventSourceMappingArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    EventSourceMapping Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The EventSourceMapping resource accepts the following input properties:

    FunctionName string

    The name or the ARN of the Lambda function that will be subscribing to events.

    AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    BatchSize int

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    BisectBatchOnFunctionError bool
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    DestinationConfig EventSourceMappingDestinationConfig
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    Enabled bool

    Determines if the mapping will be enabled on creation. Defaults to true.

    EventSourceArn string

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    FilterCriteria EventSourceMappingFilterCriteria

    The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.

    FunctionResponseTypes List<string>

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    MaximumBatchingWindowInSeconds int

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    MaximumRecordAgeInSeconds int
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    MaximumRetryAttempts int
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    ParallelizationFactor int
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    Queues string

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    ScalingConfig EventSourceMappingScalingConfig

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    SelfManagedEventSource EventSourceMappingSelfManagedEventSource
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    SourceAccessConfigurations List<EventSourceMappingSourceAccessConfiguration>

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    StartingPosition string

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    StartingPositionTimestamp string

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    Topics List<string>

    The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    TumblingWindowInSeconds int

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    FunctionName string

    The name or the ARN of the Lambda function that will be subscribing to events.

    AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    BatchSize int

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    BisectBatchOnFunctionError bool
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    DestinationConfig EventSourceMappingDestinationConfigArgs
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfigArgs
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    Enabled bool

    Determines if the mapping will be enabled on creation. Defaults to true.

    EventSourceArn string

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    FilterCriteria EventSourceMappingFilterCriteriaArgs

    The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.

    FunctionResponseTypes []string

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    MaximumBatchingWindowInSeconds int

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    MaximumRecordAgeInSeconds int
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    MaximumRetryAttempts int
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    ParallelizationFactor int
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    Queues string

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    ScalingConfig EventSourceMappingScalingConfigArgs

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    SelfManagedEventSource EventSourceMappingSelfManagedEventSourceArgs
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfigArgs

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    SourceAccessConfigurations []EventSourceMappingSourceAccessConfigurationArgs

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    StartingPosition string

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    StartingPositionTimestamp string

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    Topics []string

    The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    TumblingWindowInSeconds int

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    functionName String

    The name or the ARN of the Lambda function that will be subscribing to events.

    amazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batchSize Integer

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisectBatchOnFunctionError Boolean
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destinationConfig EventSourceMappingDestinationConfig
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    documentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled Boolean

    Determines if the mapping will be enabled on creation. Defaults to true.

    eventSourceArn String

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filterCriteria EventSourceMappingFilterCriteria

    The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.

    functionResponseTypes List<String>

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    maximumBatchingWindowInSeconds Integer

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximumRecordAgeInSeconds Integer
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximumRetryAttempts Integer
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelizationFactor Integer
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues String

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    scalingConfig EventSourceMappingScalingConfig

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    selfManagedEventSource EventSourceMappingSelfManagedEventSource
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    selfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    sourceAccessConfigurations List<EventSourceMappingSourceAccessConfiguration>

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    startingPosition String

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    startingPositionTimestamp String

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    topics List<String>

    The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumblingWindowInSeconds Integer

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    functionName string

    The name or the ARN of the Lambda function that will be subscribing to events.

    amazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batchSize number

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisectBatchOnFunctionError boolean
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destinationConfig EventSourceMappingDestinationConfig
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    documentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled boolean

    Determines if the mapping will be enabled on creation. Defaults to true.

    eventSourceArn string

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filterCriteria EventSourceMappingFilterCriteria

    The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.

    functionResponseTypes string[]

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    maximumBatchingWindowInSeconds number

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximumRecordAgeInSeconds number
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximumRetryAttempts number
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelizationFactor number
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues string

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    scalingConfig EventSourceMappingScalingConfig

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    selfManagedEventSource EventSourceMappingSelfManagedEventSource
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    selfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    sourceAccessConfigurations EventSourceMappingSourceAccessConfiguration[]

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    startingPosition string

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    startingPositionTimestamp string

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    topics string[]

    The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumblingWindowInSeconds number

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    function_name str

    The name or the ARN of the Lambda function that will be subscribing to events.

    amazon_managed_kafka_event_source_config EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batch_size int

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisect_batch_on_function_error bool
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destination_config EventSourceMappingDestinationConfigArgs
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    document_db_event_source_config EventSourceMappingDocumentDbEventSourceConfigArgs
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled bool

    Determines if the mapping will be enabled on creation. Defaults to true.

    event_source_arn str

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filter_criteria EventSourceMappingFilterCriteriaArgs

    The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.

    function_response_types Sequence[str]

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    maximum_batching_window_in_seconds int

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximum_record_age_in_seconds int
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximum_retry_attempts int
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelization_factor int
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues str

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    scaling_config EventSourceMappingScalingConfigArgs

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    self_managed_event_source EventSourceMappingSelfManagedEventSourceArgs
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    self_managed_kafka_event_source_config EventSourceMappingSelfManagedKafkaEventSourceConfigArgs

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    source_access_configurations Sequence[EventSourceMappingSourceAccessConfigurationArgs]

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    starting_position str

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    starting_position_timestamp str

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    topics Sequence[str]

    The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumbling_window_in_seconds int

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    functionName String

    The name or the ARN of the Lambda function that will be subscribing to events.

    amazonManagedKafkaEventSourceConfig Property Map

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batchSize Number

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisectBatchOnFunctionError Boolean
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destinationConfig Property Map
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    documentDbEventSourceConfig Property Map
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled Boolean

    Determines if the mapping will be enabled on creation. Defaults to true.

    eventSourceArn String

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filterCriteria Property Map

    The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.

    functionResponseTypes List<String>

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    maximumBatchingWindowInSeconds Number

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximumRecordAgeInSeconds Number
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximumRetryAttempts Number
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelizationFactor Number
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues String

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    scalingConfig Property Map

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    selfManagedEventSource Property Map
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    selfManagedKafkaEventSourceConfig Property Map

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    sourceAccessConfigurations List<Property Map>

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    startingPosition String

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    startingPositionTimestamp String

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    topics List<String>

    The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumblingWindowInSeconds Number

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    Outputs

    All input properties are implicitly available as output properties. Additionally, the EventSourceMapping resource produces the following output properties:

    FunctionArn string

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    Id string

    The provider-assigned unique ID for this managed resource.

    LastModified string

    The date this resource was last modified.

    LastProcessingResult string

    The result of the last AWS Lambda invocation of your Lambda function.

    State string

    The state of the event source mapping.

    StateTransitionReason string

    The reason the event source mapping is in its current state.

    Uuid string

    The UUID of the created event source mapping.

    FunctionArn string

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    Id string

    The provider-assigned unique ID for this managed resource.

    LastModified string

    The date this resource was last modified.

    LastProcessingResult string

    The result of the last AWS Lambda invocation of your Lambda function.

    State string

    The state of the event source mapping.

    StateTransitionReason string

    The reason the event source mapping is in its current state.

    Uuid string

    The UUID of the created event source mapping.

    functionArn String

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    id String

    The provider-assigned unique ID for this managed resource.

    lastModified String

    The date this resource was last modified.

    lastProcessingResult String

    The result of the last AWS Lambda invocation of your Lambda function.

    state String

    The state of the event source mapping.

    stateTransitionReason String

    The reason the event source mapping is in its current state.

    uuid String

    The UUID of the created event source mapping.

    functionArn string

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    id string

    The provider-assigned unique ID for this managed resource.

    lastModified string

    The date this resource was last modified.

    lastProcessingResult string

    The result of the last AWS Lambda invocation of your Lambda function.

    state string

    The state of the event source mapping.

    stateTransitionReason string

    The reason the event source mapping is in its current state.

    uuid string

    The UUID of the created event source mapping.

    function_arn str

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    id str

    The provider-assigned unique ID for this managed resource.

    last_modified str

    The date this resource was last modified.

    last_processing_result str

    The result of the last AWS Lambda invocation of your Lambda function.

    state str

    The state of the event source mapping.

    state_transition_reason str

    The reason the event source mapping is in its current state.

    uuid str

    The UUID of the created event source mapping.

    functionArn String

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    id String

    The provider-assigned unique ID for this managed resource.

    lastModified String

    The date this resource was last modified.

    lastProcessingResult String

    The result of the last AWS Lambda invocation of your Lambda function.

    state String

    The state of the event source mapping.

    stateTransitionReason String

    The reason the event source mapping is in its current state.

    uuid String

    The UUID of the created event source mapping.

    Look up Existing EventSourceMapping Resource

    Get an existing EventSourceMapping resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: EventSourceMappingState, opts?: CustomResourceOptions): EventSourceMapping
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            amazon_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs] = None,
            batch_size: Optional[int] = None,
            bisect_batch_on_function_error: Optional[bool] = None,
            destination_config: Optional[_lambda_.EventSourceMappingDestinationConfigArgs] = None,
            document_db_event_source_config: Optional[_lambda_.EventSourceMappingDocumentDbEventSourceConfigArgs] = None,
            enabled: Optional[bool] = None,
            event_source_arn: Optional[str] = None,
            filter_criteria: Optional[_lambda_.EventSourceMappingFilterCriteriaArgs] = None,
            function_arn: Optional[str] = None,
            function_name: Optional[str] = None,
            function_response_types: Optional[Sequence[str]] = None,
            last_modified: Optional[str] = None,
            last_processing_result: Optional[str] = None,
            maximum_batching_window_in_seconds: Optional[int] = None,
            maximum_record_age_in_seconds: Optional[int] = None,
            maximum_retry_attempts: Optional[int] = None,
            parallelization_factor: Optional[int] = None,
            queues: Optional[str] = None,
            scaling_config: Optional[_lambda_.EventSourceMappingScalingConfigArgs] = None,
            self_managed_event_source: Optional[_lambda_.EventSourceMappingSelfManagedEventSourceArgs] = None,
            self_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs] = None,
            source_access_configurations: Optional[Sequence[_lambda_.EventSourceMappingSourceAccessConfigurationArgs]] = None,
            starting_position: Optional[str] = None,
            starting_position_timestamp: Optional[str] = None,
            state: Optional[str] = None,
            state_transition_reason: Optional[str] = None,
            topics: Optional[Sequence[str]] = None,
            tumbling_window_in_seconds: Optional[int] = None,
            uuid: Optional[str] = None) -> EventSourceMapping
    func GetEventSourceMapping(ctx *Context, name string, id IDInput, state *EventSourceMappingState, opts ...ResourceOption) (*EventSourceMapping, error)
    public static EventSourceMapping Get(string name, Input<string> id, EventSourceMappingState? state, CustomResourceOptions? opts = null)
    public static EventSourceMapping get(String name, Output<String> id, EventSourceMappingState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    BatchSize int

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    BisectBatchOnFunctionError bool
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    DestinationConfig EventSourceMappingDestinationConfig
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    Enabled bool

    Determines if the mapping will be enabled on creation. Defaults to true.

    EventSourceArn string

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    FilterCriteria EventSourceMappingFilterCriteria

    The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.

    FunctionArn string

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    FunctionName string

    The name or the ARN of the Lambda function that will be subscribing to events.

    FunctionResponseTypes List<string>

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    LastModified string

    The date this resource was last modified.

    LastProcessingResult string

    The result of the last AWS Lambda invocation of your Lambda function.

    MaximumBatchingWindowInSeconds int

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    MaximumRecordAgeInSeconds int
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    MaximumRetryAttempts int
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    ParallelizationFactor int
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    Queues string

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    ScalingConfig EventSourceMappingScalingConfig

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    SelfManagedEventSource EventSourceMappingSelfManagedEventSource
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    SourceAccessConfigurations List<EventSourceMappingSourceAccessConfiguration>

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    StartingPosition string

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    StartingPositionTimestamp string

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    State string

    The state of the event source mapping.

    StateTransitionReason string

    The reason the event source mapping is in its current state.

    Topics List<string>

    The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    TumblingWindowInSeconds int

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    Uuid string

    The UUID of the created event source mapping.

    AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    BatchSize int

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    BisectBatchOnFunctionError bool
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    DestinationConfig EventSourceMappingDestinationConfigArgs
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfigArgs
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    Enabled bool

    Determines if the mapping will be enabled on creation. Defaults to true.

    EventSourceArn string

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    FilterCriteria EventSourceMappingFilterCriteriaArgs

    The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.

    FunctionArn string

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    FunctionName string

    The name or the ARN of the Lambda function that will be subscribing to events.

    FunctionResponseTypes []string

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    LastModified string

    The date this resource was last modified.

    LastProcessingResult string

    The result of the last AWS Lambda invocation of your Lambda function.

    MaximumBatchingWindowInSeconds int

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    MaximumRecordAgeInSeconds int
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    MaximumRetryAttempts int
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    ParallelizationFactor int
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    Queues string

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    ScalingConfig EventSourceMappingScalingConfigArgs

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    SelfManagedEventSource EventSourceMappingSelfManagedEventSourceArgs
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfigArgs

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    SourceAccessConfigurations []EventSourceMappingSourceAccessConfigurationArgs

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    StartingPosition string

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    StartingPositionTimestamp string

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    State string

    The state of the event source mapping.

    StateTransitionReason string

    The reason the event source mapping is in its current state.

    Topics []string

    The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    TumblingWindowInSeconds int

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    Uuid string

    The UUID of the created event source mapping.

    amazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batchSize Integer

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisectBatchOnFunctionError Boolean
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destinationConfig EventSourceMappingDestinationConfig
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    documentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled Boolean

    Determines if the mapping will be enabled on creation. Defaults to true.

    eventSourceArn String

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filterCriteria EventSourceMappingFilterCriteria

    The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.

    functionArn String

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    functionName String

    The name or the ARN of the Lambda function that will be subscribing to events.

    functionResponseTypes List<String>

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    lastModified String

    The date this resource was last modified.

    lastProcessingResult String

    The result of the last AWS Lambda invocation of your Lambda function.

    maximumBatchingWindowInSeconds Integer

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximumRecordAgeInSeconds Integer
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximumRetryAttempts Integer
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelizationFactor Integer
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues String

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.

    scalingConfig EventSourceMappingScalingConfig

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    selfManagedEventSource EventSourceMappingSelfManagedEventSource
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    selfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    sourceAccessConfigurations List<EventSourceMappingSourceAccessConfiguration>

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    startingPosition String

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    startingPositionTimestamp String

    A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    state String

    The state of the event source mapping.

    stateTransitionReason String

    The reason the event source mapping is in its current state.

    topics List<String>

    The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumblingWindowInSeconds Integer

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    uuid String

    The UUID of the created event source mapping.

    amazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batchSize number

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisectBatchOnFunctionError boolean
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destinationConfig EventSourceMappingDestinationConfig
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    documentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled boolean

    Determines if the mapping will be enabled on creation. Defaults to true.

    eventSourceArn string

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filterCriteria EventSourceMappingFilterCriteria

    The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.

    functionArn string

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    functionName string

    The name or the ARN of the Lambda function that will be subscribing to events.

    functionResponseTypes string[]

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    lastModified string

    The date this resource was last modified.

    lastProcessingResult string

    The result of the last AWS Lambda invocation of your Lambda function.

    maximumBatchingWindowInSeconds number

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximumRecordAgeInSeconds number
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximumRetryAttempts number
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelizationFactor number
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues string

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Only a single queue name may be specified.

    scalingConfig EventSourceMappingScalingConfig

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    selfManagedEventSource EventSourceMappingSelfManagedEventSource
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    selfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    sourceAccessConfigurations EventSourceMappingSourceAccessConfiguration[]

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    startingPosition string

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    startingPositionTimestamp string

    A timestamp in RFC3339 format of the data record at which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    state string

    The state of the event source mapping.

    stateTransitionReason string

    The reason the event source mapping is in its current state.

    topics string[]

    The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumblingWindowInSeconds number

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    uuid string

    The UUID of the created event source mapping.

    amazon_managed_kafka_event_source_config EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batch_size int

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisect_batch_on_function_error bool
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destination_config EventSourceMappingDestinationConfigArgs
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    document_db_event_source_config EventSourceMappingDocumentDbEventSourceConfigArgs
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled bool

    Determines if the mapping will be enabled on creation. Defaults to true.

    event_source_arn str

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filter_criteria EventSourceMappingFilterCriteriaArgs

    The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.

    function_arn str

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    function_name str

    The name or the ARN of the Lambda function that will be subscribing to events.

    function_response_types Sequence[str]

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    last_modified str

    The date this resource was last modified.

    last_processing_result str

    The result of the last AWS Lambda invocation of your Lambda function.

    maximum_batching_window_in_seconds int

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximum_record_age_in_seconds int
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximum_retry_attempts int
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelization_factor int
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues str

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Only a single queue name may be specified.

    scaling_config EventSourceMappingScalingConfigArgs

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    self_managed_event_source EventSourceMappingSelfManagedEventSourceArgs
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    self_managed_kafka_event_source_config EventSourceMappingSelfManagedKafkaEventSourceConfigArgs

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    source_access_configurations Sequence[EventSourceMappingSourceAccessConfigurationArgs]

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    starting_position str

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    starting_position_timestamp str

    A timestamp in RFC3339 format of the data record at which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    state str

    The state of the event source mapping.

    state_transition_reason str

    The reason the event source mapping is in its current state.

    topics Sequence[str]

    The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumbling_window_in_seconds int

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    uuid str

    The UUID of the created event source mapping.

    amazonManagedKafkaEventSourceConfig Property Map

    Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.

    batchSize Number

    The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.

    bisectBatchOnFunctionError Boolean
    • (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
    destinationConfig Property Map
    • (Optional) An Amazon SQS queue or Amazon SNS topic destination for failed records. Only available for stream sources (DynamoDB and Kinesis). Detailed below.
    documentDbEventSourceConfig Property Map
    • (Optional) Configuration settings for a DocumentDB event source. Detailed below.
    enabled Boolean

    Determines if the mapping will be enabled on creation. Defaults to true.

    eventSourceArn String

    The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.

    filterCriteria Property Map

    The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.

    functionArn String

    The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)

    functionName String

    The name or the ARN of the Lambda function that will be subscribing to events.

    functionResponseTypes List<String>

    A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.

    lastModified String

    The date this resource was last modified.

    lastProcessingResult String

    The result of the last AWS Lambda invocation of your Lambda function.

    maximumBatchingWindowInSeconds Number

    The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.

    maximumRecordAgeInSeconds Number
    • (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
    maximumRetryAttempts Number
    • (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
    parallelizationFactor Number
    • (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
    queues String

    The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Only a single queue name may be specified.

    scalingConfig Property Map

    Scaling configuration of the event source. Only available for SQS queues. Detailed below.

    selfManagedEventSource Property Map
    • (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
    selfManagedKafkaEventSourceConfig Property Map

    Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.

    sourceAccessConfigurations List<Property Map>

    For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.

    startingPosition String

    The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.

    startingPositionTimestamp String

    A timestamp in RFC3339 format of the data record at which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.

    state String

    The state of the event source mapping.

    stateTransitionReason String

    The reason the event source mapping is in its current state.

    topics List<String>

    The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.

    tumblingWindowInSeconds Number

    The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).

    uuid String

    The UUID of the created event source mapping.

    Supporting Types

    EventSourceMappingAmazonManagedKafkaEventSourceConfig, EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs

    ConsumerGroupId string

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.

    ConsumerGroupId string

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.

    consumerGroupId String

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.

    consumerGroupId string

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.

    consumer_group_id str

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.

    consumerGroupId String

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.

    EventSourceMappingDestinationConfig, EventSourceMappingDestinationConfigArgs

    OnFailure EventSourceMappingDestinationConfigOnFailure

    The destination configuration for failed invocations. Detailed below.

    OnFailure EventSourceMappingDestinationConfigOnFailure

    The destination configuration for failed invocations. Detailed below.

    onFailure EventSourceMappingDestinationConfigOnFailure

    The destination configuration for failed invocations. Detailed below.

    onFailure EventSourceMappingDestinationConfigOnFailure

    The destination configuration for failed invocations. Detailed below.

    on_failure EventSourceMappingDestinationConfigOnFailure

    The destination configuration for failed invocations. Detailed below.

    onFailure Property Map

    The destination configuration for failed invocations. Detailed below.

    EventSourceMappingDestinationConfigOnFailure, EventSourceMappingDestinationConfigOnFailureArgs

    DestinationArn string

    The Amazon Resource Name (ARN) of the destination resource.

    DestinationArn string

    The Amazon Resource Name (ARN) of the destination resource.

    destinationArn String

    The Amazon Resource Name (ARN) of the destination resource.

    destinationArn string

    The Amazon Resource Name (ARN) of the destination resource.

    destination_arn str

    The Amazon Resource Name (ARN) of the destination resource.

    destinationArn String

    The Amazon Resource Name (ARN) of the destination resource.

    EventSourceMappingDocumentDbEventSourceConfig, EventSourceMappingDocumentDbEventSourceConfigArgs

    DatabaseName string

    The name of the database to consume within the DocumentDB cluster.

    CollectionName string

    The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.

    FullDocument string

    Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.

    DatabaseName string

    The name of the database to consume within the DocumentDB cluster.

    CollectionName string

    The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.

    FullDocument string

    Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.

    databaseName String

    The name of the database to consume within the DocumentDB cluster.

    collectionName String

    The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.

    fullDocument String

    Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.

    databaseName string

    The name of the database to consume within the DocumentDB cluster.

    collectionName string

    The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.

    fullDocument string

    Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.

    database_name str

    The name of the database to consume within the DocumentDB cluster.

    collection_name str

    The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.

    full_document str

    Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.

    databaseName String

    The name of the database to consume within the DocumentDB cluster.

    collectionName String

    The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.

    fullDocument String

    Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.

    EventSourceMappingFilterCriteria, EventSourceMappingFilterCriteriaArgs

    Filters List<EventSourceMappingFilterCriteriaFilter>

    A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.

    Filters []EventSourceMappingFilterCriteriaFilter

    A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.

    filters List<EventSourceMappingFilterCriteriaFilter>

    A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.

    filters EventSourceMappingFilterCriteriaFilter[]

    A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.

    filters Sequence[EventSourceMappingFilterCriteriaFilter]

    A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.

    filters List<Property Map>

    A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.

    EventSourceMappingFilterCriteriaFilter, EventSourceMappingFilterCriteriaFilterArgs

    Pattern string

    A filter pattern up to 4096 characters. See Filter Rule Syntax.

    Pattern string

    A filter pattern up to 4096 characters. See Filter Rule Syntax.

    pattern String

    A filter pattern up to 4096 characters. See Filter Rule Syntax.

    pattern string

    A filter pattern up to 4096 characters. See Filter Rule Syntax.

    pattern str

    A filter pattern up to 4096 characters. See Filter Rule Syntax.

    pattern String

    A filter pattern up to 4096 characters. See Filter Rule Syntax.

    EventSourceMappingScalingConfig, EventSourceMappingScalingConfigArgs

    MaximumConcurrency int

    Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be between 2 and 1000. See Configuring maximum concurrency for Amazon SQS event sources.

    MaximumConcurrency int

    Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be between 2 and 1000. See Configuring maximum concurrency for Amazon SQS event sources.

    maximumConcurrency Integer

    Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be between 2 and 1000. See Configuring maximum concurrency for Amazon SQS event sources.

    maximumConcurrency number

    Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be between 2 and 1000. See Configuring maximum concurrency for Amazon SQS event sources.

    maximum_concurrency int

    Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be between 2 and 1000. See Configuring maximum concurrency for Amazon SQS event sources.

    maximumConcurrency Number

    Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be between 2 and 1000. See Configuring maximum concurrency for Amazon SQS event sources.

    EventSourceMappingSelfManagedEventSource, EventSourceMappingSelfManagedEventSourceArgs

    Endpoints Dictionary<string, string>

    A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma separated list of broker endpoints.

    Endpoints map[string]string

    A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma separated list of broker endpoints.

    endpoints Map<String,String>

    A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma separated list of broker endpoints.

    endpoints {[key: string]: string}

    A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma separated list of broker endpoints.

    endpoints Mapping[str, str]

    A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma separated list of broker endpoints.

    endpoints Map<String>

    A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma separated list of broker endpoints.

    EventSourceMappingSelfManagedKafkaEventSourceConfig, EventSourceMappingSelfManagedKafkaEventSourceConfigArgs

    ConsumerGroupId string

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.

    ConsumerGroupId string

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.

    consumerGroupId String

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.

    consumerGroupId string

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.

    consumer_group_id str

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.

    consumerGroupId String

    A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.

    EventSourceMappingSourceAccessConfiguration, EventSourceMappingSourceAccessConfigurationArgs

    Type string

    The type of this configuration. For Self Managed Kafka you will need to supply blocks for type VPC_SUBNET and VPC_SECURITY_GROUP.

    Uri string

    The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.

    Type string

    The type of this configuration. For Self Managed Kafka you will need to supply blocks for type VPC_SUBNET and VPC_SECURITY_GROUP.

    Uri string

    The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.

    type String

    The type of this configuration. For Self Managed Kafka you will need to supply blocks for type VPC_SUBNET and VPC_SECURITY_GROUP.

    uri String

    The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.

    type string

    The type of this configuration. For Self Managed Kafka you will need to supply blocks for type VPC_SUBNET and VPC_SECURITY_GROUP.

    uri string

    The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.

    type str

    The type of this configuration. For Self Managed Kafka you will need to supply blocks for type VPC_SUBNET and VPC_SECURITY_GROUP.

    uri str

    The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.

    type String

    The type of this configuration. For Self Managed Kafka you will need to supply blocks for type VPC_SUBNET and VPC_SECURITY_GROUP.

    uri String

    The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.

    Import

    Using pulumi import, import Lambda event source mappings using the UUID (event source mapping identifier). For example:

     $ pulumi import aws:lambda/eventSourceMapping:EventSourceMapping event_source_mapping 12345kxodurf3443
    

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes

    This Pulumi package is based on the aws Terraform Provider.

    aws logo

    Try AWS Native preview for resources not in the classic version.

    AWS Classic v6.3.0 published on Thursday, Sep 28, 2023 by Pulumi