aws.lambda.EventSourceMapping
Provides a Lambda event source mapping. This allows Lambda functions to get events from Kinesis, DynamoDB, SQS, Amazon MQ and Managed Streaming for Apache Kafka (MSK).
For information about Lambda and how to use it, see What is AWS Lambda?. For information about event source mappings, see CreateEventSourceMapping in the API docs.
Example Usage
DynamoDB
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.lambda.EventSourceMapping("example", {
    eventSourceArn: exampleAwsDynamodbTable.streamArn,
    functionName: exampleAwsLambdaFunction.arn,
    startingPosition: "LATEST",
    tags: {
        Name: "dynamodb",
    },
});
import pulumi
import pulumi_aws as aws
example = aws.lambda_.EventSourceMapping("example",
    event_source_arn=example_aws_dynamodb_table["streamArn"],
    function_name=example_aws_lambda_function["arn"],
    starting_position="LATEST",
    tags={
        "Name": "dynamodb",
    })
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
			EventSourceArn:   pulumi.Any(exampleAwsDynamodbTable.StreamArn),
			FunctionName:     pulumi.Any(exampleAwsLambdaFunction.Arn),
			StartingPosition: pulumi.String("LATEST"),
			Tags: pulumi.StringMap{
				"Name": pulumi.String("dynamodb"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Lambda.EventSourceMapping("example", new()
    {
        EventSourceArn = exampleAwsDynamodbTable.StreamArn,
        FunctionName = exampleAwsLambdaFunction.Arn,
        StartingPosition = "LATEST",
        Tags = 
        {
            { "Name", "dynamodb" },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .eventSourceArn(exampleAwsDynamodbTable.streamArn())
            .functionName(exampleAwsLambdaFunction.arn())
            .startingPosition("LATEST")
            .tags(Map.of("Name", "dynamodb"))
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      eventSourceArn: ${exampleAwsDynamodbTable.streamArn}
      functionName: ${exampleAwsLambdaFunction.arn}
      startingPosition: LATEST
      tags:
        Name: dynamodb
Kinesis
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.lambda.EventSourceMapping("example", {
    eventSourceArn: exampleAwsKinesisStream.arn,
    functionName: exampleAwsLambdaFunction.arn,
    startingPosition: "LATEST",
});
import pulumi
import pulumi_aws as aws
example = aws.lambda_.EventSourceMapping("example",
    event_source_arn=example_aws_kinesis_stream["arn"],
    function_name=example_aws_lambda_function["arn"],
    starting_position="LATEST")
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
			EventSourceArn:   pulumi.Any(exampleAwsKinesisStream.Arn),
			FunctionName:     pulumi.Any(exampleAwsLambdaFunction.Arn),
			StartingPosition: pulumi.String("LATEST"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Lambda.EventSourceMapping("example", new()
    {
        EventSourceArn = exampleAwsKinesisStream.Arn,
        FunctionName = exampleAwsLambdaFunction.Arn,
        StartingPosition = "LATEST",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .eventSourceArn(exampleAwsKinesisStream.arn())
            .functionName(exampleAwsLambdaFunction.arn())
            .startingPosition("LATEST")
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      eventSourceArn: ${exampleAwsKinesisStream.arn}
      functionName: ${exampleAwsLambdaFunction.arn}
      startingPosition: LATEST
Managed Streaming for Apache Kafka (MSK)
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.lambda.EventSourceMapping("example", {
    eventSourceArn: exampleAwsMskCluster.arn,
    functionName: exampleAwsLambdaFunction.arn,
    topics: ["Example"],
    startingPosition: "TRIM_HORIZON",
});
import pulumi
import pulumi_aws as aws
example = aws.lambda_.EventSourceMapping("example",
    event_source_arn=example_aws_msk_cluster["arn"],
    function_name=example_aws_lambda_function["arn"],
    topics=["Example"],
    starting_position="TRIM_HORIZON")
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
			EventSourceArn: pulumi.Any(exampleAwsMskCluster.Arn),
			FunctionName:   pulumi.Any(exampleAwsLambdaFunction.Arn),
			Topics: pulumi.StringArray{
				pulumi.String("Example"),
			},
			StartingPosition: pulumi.String("TRIM_HORIZON"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Lambda.EventSourceMapping("example", new()
    {
        EventSourceArn = exampleAwsMskCluster.Arn,
        FunctionName = exampleAwsLambdaFunction.Arn,
        Topics = new[]
        {
            "Example",
        },
        StartingPosition = "TRIM_HORIZON",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .eventSourceArn(exampleAwsMskCluster.arn())
            .functionName(exampleAwsLambdaFunction.arn())
            .topics("Example")
            .startingPosition("TRIM_HORIZON")
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      eventSourceArn: ${exampleAwsMskCluster.arn}
      functionName: ${exampleAwsLambdaFunction.arn}
      topics:
        - Example
      startingPosition: TRIM_HORIZON
Self Managed Apache Kafka
Coming soon! TypeScript, Python, Go, and C# examples are not yet available; see the Java and YAML examples below, and the TypeScript sketch that follows them.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingProvisionedPollerConfigArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingSelfManagedEventSourceArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .functionName(exampleAwsLambdaFunction.arn())
            .topics("Example")
            .startingPosition("TRIM_HORIZON")
            .provisionedPollerConfig(EventSourceMappingProvisionedPollerConfigArgs.builder()
                .maximumPollers(80)
                .minimumPollers(10)
                .build())
            .selfManagedEventSource(EventSourceMappingSelfManagedEventSourceArgs.builder()
                .endpoints(Map.of("KAFKA_BOOTSTRAP_SERVERS", "kafka1.example.com:9092,kafka2.example.com:9092"))
                .build())
            .sourceAccessConfigurations(            
                EventSourceMappingSourceAccessConfigurationArgs.builder()
                    .type("VPC_SUBNET")
                    .uri("subnet:subnet-example1")
                    .build(),
                EventSourceMappingSourceAccessConfigurationArgs.builder()
                    .type("VPC_SUBNET")
                    .uri("subnet:subnet-example2")
                    .build(),
                EventSourceMappingSourceAccessConfigurationArgs.builder()
                    .type("VPC_SECURITY_GROUP")
                    .uri("security_group:sg-example")
                    .build())
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      functionName: ${exampleAwsLambdaFunction.arn}
      topics:
        - Example
      startingPosition: TRIM_HORIZON
      provisionedPollerConfig:
        maximumPollers: 80
        minimumPollers: 10
      selfManagedEventSource:
        endpoints:
          KAFKA_BOOTSTRAP_SERVERS: kafka1.example.com:9092,kafka2.example.com:9092
      sourceAccessConfigurations:
        - type: VPC_SUBNET
          uri: subnet:subnet-example1
        - type: VPC_SUBNET
          uri: subnet:subnet-example2
        - type: VPC_SECURITY_GROUP
          uri: security_group:sg-example
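A TypeScript example is not yet included above; the following is a minimal sketch derived from the Java and YAML examples, assuming the same exampleAwsLambdaFunction, Kafka bootstrap servers, subnets, and security group shown there.
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
// Sketch of the self-managed Apache Kafka mapping shown in YAML above.
// `exampleAwsLambdaFunction` is assumed to be an existing aws.lambda.Function.
const example = new aws.lambda.EventSourceMapping("example", {
    functionName: exampleAwsLambdaFunction.arn,
    topics: ["Example"],
    startingPosition: "TRIM_HORIZON",
    provisionedPollerConfig: {
        maximumPollers: 80,
        minimumPollers: 10,
    },
    selfManagedEventSource: {
        endpoints: {
            KAFKA_BOOTSTRAP_SERVERS: "kafka1.example.com:9092,kafka2.example.com:9092",
        },
    },
    sourceAccessConfigurations: [
        {
            type: "VPC_SUBNET",
            uri: "subnet:subnet-example1",
        },
        {
            type: "VPC_SUBNET",
            uri: "subnet:subnet-example2",
        },
        {
            type: "VPC_SECURITY_GROUP",
            uri: "security_group:sg-example",
        },
    ],
});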
SQS
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.lambda.EventSourceMapping("example", {
    eventSourceArn: sqsQueueTest.arn,
    functionName: exampleAwsLambdaFunction.arn,
});
import pulumi
import pulumi_aws as aws
example = aws.lambda_.EventSourceMapping("example",
    event_source_arn=sqs_queue_test["arn"],
    function_name=example_aws_lambda_function["arn"])
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
			EventSourceArn: pulumi.Any(sqsQueueTest.Arn),
			FunctionName:   pulumi.Any(exampleAwsLambdaFunction.Arn),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Lambda.EventSourceMapping("example", new()
    {
        EventSourceArn = sqsQueueTest.Arn,
        FunctionName = exampleAwsLambdaFunction.Arn,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .eventSourceArn(sqsQueueTest.arn())
            .functionName(exampleAwsLambdaFunction.arn())
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      eventSourceArn: ${sqsQueueTest.arn}
      functionName: ${exampleAwsLambdaFunction.arn}
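Where SQS invocation concurrency needs to be capped, the scalingConfig block described under the resource properties below can be added to the mapping. This is a minimal TypeScript sketch using the same queue and function as above; the maximumConcurrency value of 10 is illustrative.
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
// Sketch only: limits concurrent Lambda invocations for the SQS source
// via scalingConfig.maximumConcurrency.
const example = new aws.lambda.EventSourceMapping("example", {
    eventSourceArn: sqsQueueTest.arn,
    functionName: exampleAwsLambdaFunction.arn,
    scalingConfig: {
        maximumConcurrency: 10,
    },
});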
SQS with event filter
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.lambda.EventSourceMapping("example", {
    eventSourceArn: sqsQueueTest.arn,
    functionName: exampleAwsLambdaFunction.arn,
    filterCriteria: {
        filters: [{
            pattern: JSON.stringify({
                body: {
                    Temperature: [{
                        numeric: [
                            ">",
                            0,
                            "<=",
                            100,
                        ],
                    }],
                    Location: ["New York"],
                },
            }),
        }],
    },
});
import pulumi
import json
import pulumi_aws as aws
example = aws.lambda_.EventSourceMapping("example",
    event_source_arn=sqs_queue_test["arn"],
    function_name=example_aws_lambda_function["arn"],
    filter_criteria={
        "filters": [{
            "pattern": json.dumps({
                "body": {
                    "Temperature": [{
                        "numeric": [
                            ">",
                            0,
                            "<=",
                            100,
                        ],
                    }],
                    "Location": ["New York"],
                },
            }),
        }],
    })
package main
import (
	"encoding/json"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		tmpJSON0, err := json.Marshal(map[string]interface{}{
			"body": map[string]interface{}{
				"Temperature": []map[string]interface{}{
					map[string]interface{}{
						"numeric": []interface{}{
							">",
							0,
							"<=",
							100,
						},
					},
				},
				"Location": []string{
					"New York",
				},
			},
		})
		if err != nil {
			return err
		}
		json0 := string(tmpJSON0)
		_, err = lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
			EventSourceArn: pulumi.Any(sqsQueueTest.Arn),
			FunctionName:   pulumi.Any(exampleAwsLambdaFunction.Arn),
			FilterCriteria: &lambda.EventSourceMappingFilterCriteriaArgs{
				Filters: lambda.EventSourceMappingFilterCriteriaFilterArray{
					&lambda.EventSourceMappingFilterCriteriaFilterArgs{
						Pattern: pulumi.String(json0),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Lambda.EventSourceMapping("example", new()
    {
        EventSourceArn = sqsQueueTest.Arn,
        FunctionName = exampleAwsLambdaFunction.Arn,
        FilterCriteria = new Aws.Lambda.Inputs.EventSourceMappingFilterCriteriaArgs
        {
            Filters = new[]
            {
                new Aws.Lambda.Inputs.EventSourceMappingFilterCriteriaFilterArgs
                {
                    Pattern = JsonSerializer.Serialize(new Dictionary<string, object?>
                    {
                        ["body"] = new Dictionary<string, object?>
                        {
                            ["Temperature"] = new[]
                            {
                                new Dictionary<string, object?>
                                {
                                    ["numeric"] = new object?[]
                                    {
                                        ">",
                                        0,
                                        "<=",
                                        100,
                                    },
                                },
                            },
                            ["Location"] = new[]
                            {
                                "New York",
                            },
                        },
                    }),
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingFilterCriteriaArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingFilterCriteriaFilterArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .eventSourceArn(sqsQueueTest.arn())
            .functionName(exampleAwsLambdaFunction.arn())
            .filterCriteria(EventSourceMappingFilterCriteriaArgs.builder()
                .filters(EventSourceMappingFilterCriteriaFilterArgs.builder()
                    .pattern(serializeJson(
                        jsonObject(
                            jsonProperty("body", jsonObject(
                                jsonProperty("Temperature", jsonArray(jsonObject(
                                    jsonProperty("numeric", jsonArray(
                                        ">", 
                                        0, 
                                        "<=", 
                                        100
                                    ))
                                ))),
                                jsonProperty("Location", jsonArray("New York"))
                            ))
                        )))
                    .build())
                .build())
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      eventSourceArn: ${sqsQueueTest.arn}
      functionName: ${exampleAwsLambdaFunction.arn}
      filterCriteria:
        filters:
          - pattern:
              fn::toJSON:
                body:
                  Temperature:
                    - numeric:
                        - '>'
                        - 0
                        - <=
                        - 100
                  Location:
                    - New York
Amazon MQ (ActiveMQ)
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.lambda.EventSourceMapping("example", {
    batchSize: 10,
    eventSourceArn: exampleAwsMqBroker.arn,
    enabled: true,
    functionName: exampleAwsLambdaFunction.arn,
    queues: "example",
    sourceAccessConfigurations: [{
        type: "BASIC_AUTH",
        uri: exampleAwsSecretsmanagerSecretVersion.arn,
    }],
});
import pulumi
import pulumi_aws as aws
example = aws.lambda_.EventSourceMapping("example",
    batch_size=10,
    event_source_arn=example_aws_mq_broker["arn"],
    enabled=True,
    function_name=example_aws_lambda_function["arn"],
    queues="example",
    source_access_configurations=[{
        "type": "BASIC_AUTH",
        "uri": example_aws_secretsmanager_secret_version["arn"],
    }])
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
			BatchSize:      pulumi.Int(10),
			EventSourceArn: pulumi.Any(exampleAwsMqBroker.Arn),
			Enabled:        pulumi.Bool(true),
			FunctionName:   pulumi.Any(exampleAwsLambdaFunction.Arn),
			Queues:         pulumi.String("example"),
			SourceAccessConfigurations: lambda.EventSourceMappingSourceAccessConfigurationArray{
				&lambda.EventSourceMappingSourceAccessConfigurationArgs{
					Type: pulumi.String("BASIC_AUTH"),
					Uri:  pulumi.Any(exampleAwsSecretsmanagerSecretVersion.Arn),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Lambda.EventSourceMapping("example", new()
    {
        BatchSize = 10,
        EventSourceArn = exampleAwsMqBroker.Arn,
        Enabled = true,
        FunctionName = exampleAwsLambdaFunction.Arn,
        Queues = "example",
        SourceAccessConfigurations = new[]
        {
            new Aws.Lambda.Inputs.EventSourceMappingSourceAccessConfigurationArgs
            {
                Type = "BASIC_AUTH",
                Uri = exampleAwsSecretsmanagerSecretVersion.Arn,
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .batchSize(10)
            .eventSourceArn(exampleAwsMqBroker.arn())
            .enabled(true)
            .functionName(exampleAwsLambdaFunction.arn())
            .queues("example")
            .sourceAccessConfigurations(EventSourceMappingSourceAccessConfigurationArgs.builder()
                .type("BASIC_AUTH")
                .uri(exampleAwsSecretsmanagerSecretVersion.arn())
                .build())
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      batchSize: 10
      eventSourceArn: ${exampleAwsMqBroker.arn}
      enabled: true
      functionName: ${exampleAwsLambdaFunction.arn}
      queues: example
      sourceAccessConfigurations:
        - type: BASIC_AUTH
          uri: ${exampleAwsSecretsmanagerSecretVersion.arn}
Amazon MQ (RabbitMQ)
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.lambda.EventSourceMapping("example", {
    batchSize: 1,
    eventSourceArn: exampleAwsMqBroker.arn,
    enabled: true,
    functionName: exampleAwsLambdaFunction.arn,
    queues: "example",
    sourceAccessConfigurations: [
        {
            type: "VIRTUAL_HOST",
            uri: "/example",
        },
        {
            type: "BASIC_AUTH",
            uri: exampleAwsSecretsmanagerSecretVersion.arn,
        },
    ],
});
import pulumi
import pulumi_aws as aws
example = aws.lambda_.EventSourceMapping("example",
    batch_size=1,
    event_source_arn=example_aws_mq_broker["arn"],
    enabled=True,
    function_name=example_aws_lambda_function["arn"],
    queues="example",
    source_access_configurations=[
        {
            "type": "VIRTUAL_HOST",
            "uri": "/example",
        },
        {
            "type": "BASIC_AUTH",
            "uri": example_aws_secretsmanager_secret_version["arn"],
        },
    ])
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/lambda"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := lambda.NewEventSourceMapping(ctx, "example", &lambda.EventSourceMappingArgs{
			BatchSize:      pulumi.Int(1),
			EventSourceArn: pulumi.Any(exampleAwsMqBroker.Arn),
			Enabled:        pulumi.Bool(true),
			FunctionName:   pulumi.Any(exampleAwsLambdaFunction.Arn),
			Queues:         pulumi.String("example"),
			SourceAccessConfigurations: lambda.EventSourceMappingSourceAccessConfigurationArray{
				&lambda.EventSourceMappingSourceAccessConfigurationArgs{
					Type: pulumi.String("VIRTUAL_HOST"),
					Uri:  pulumi.String("/example"),
				},
				&lambda.EventSourceMappingSourceAccessConfigurationArgs{
					Type: pulumi.String("BASIC_AUTH"),
					Uri:  pulumi.Any(exampleAwsSecretsmanagerSecretVersion.Arn),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Lambda.EventSourceMapping("example", new()
    {
        BatchSize = 1,
        EventSourceArn = exampleAwsMqBroker.Arn,
        Enabled = true,
        FunctionName = exampleAwsLambdaFunction.Arn,
        Queues = "example",
        SourceAccessConfigurations = new[]
        {
            new Aws.Lambda.Inputs.EventSourceMappingSourceAccessConfigurationArgs
            {
                Type = "VIRTUAL_HOST",
                Uri = "/example",
            },
            new Aws.Lambda.Inputs.EventSourceMappingSourceAccessConfigurationArgs
            {
                Type = "BASIC_AUTH",
                Uri = exampleAwsSecretsmanagerSecretVersion.Arn,
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.lambda.EventSourceMapping;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
            .batchSize(1)
            .eventSourceArn(exampleAwsMqBroker.arn())
            .enabled(true)
            .functionName(exampleAwsLambdaFunction.arn())
            .queues("example")
            .sourceAccessConfigurations(            
                EventSourceMappingSourceAccessConfigurationArgs.builder()
                    .type("VIRTUAL_HOST")
                    .uri("/example")
                    .build(),
                EventSourceMappingSourceAccessConfigurationArgs.builder()
                    .type("BASIC_AUTH")
                    .uri(exampleAwsSecretsmanagerSecretVersion.arn())
                    .build())
            .build());
    }
}
resources:
  example:
    type: aws:lambda:EventSourceMapping
    properties:
      batchSize: 1
      eventSourceArn: ${exampleAwsMqBroker.arn}
      enabled: true
      functionName: ${exampleAwsLambdaFunction.arn}
      queues: example
      sourceAccessConfigurations:
        - type: VIRTUAL_HOST
          uri: /example
        - type: BASIC_AUTH
          uri: ${exampleAwsSecretsmanagerSecretVersion.arn}
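The resource also accepts DocumentDB change streams as an event source (see eventSourceArn and documentDbEventSourceConfig under the resource properties below), although no example is shown above. The following TypeScript sketch is illustrative only and assumes a DocumentDB cluster exampleAwsDocdbCluster and a Secrets Manager secret version exampleAwsSecretsmanagerSecretVersion holding the database credentials.
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
// Sketch only: maps a DocumentDB change stream to the function.
// Cluster, secret, database and collection names are placeholders.
const example = new aws.lambda.EventSourceMapping("example", {
    eventSourceArn: exampleAwsDocdbCluster.arn,
    functionName: exampleAwsLambdaFunction.arn,
    startingPosition: "LATEST",
    documentDbEventSourceConfig: {
        databaseName: "exampledb",
        collectionName: "examplecollection",
        fullDocument: "UpdateLookup",
    },
    sourceAccessConfigurations: [{
        type: "BASIC_AUTH",
        uri: exampleAwsSecretsmanagerSecretVersion.arn,
    }],
});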
Create EventSourceMapping Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new EventSourceMapping(name: string, args: EventSourceMappingArgs, opts?: CustomResourceOptions);
@overload
def EventSourceMapping(resource_name: str,
                       args: EventSourceMappingArgs,
                       opts: Optional[ResourceOptions] = None)
@overload
def EventSourceMapping(resource_name: str,
                       opts: Optional[ResourceOptions] = None,
                       function_name: Optional[str] = None,
                       maximum_record_age_in_seconds: Optional[int] = None,
                       destination_config: Optional[_lambda_.EventSourceMappingDestinationConfigArgs] = None,
                       metrics_config: Optional[_lambda_.EventSourceMappingMetricsConfigArgs] = None,
                       parallelization_factor: Optional[int] = None,
                       enabled: Optional[bool] = None,
                       event_source_arn: Optional[str] = None,
                       filter_criteria: Optional[_lambda_.EventSourceMappingFilterCriteriaArgs] = None,
                       batch_size: Optional[int] = None,
                       function_response_types: Optional[Sequence[str]] = None,
                       kms_key_arn: Optional[str] = None,
                       maximum_batching_window_in_seconds: Optional[int] = None,
                       amazon_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs] = None,
                       tumbling_window_in_seconds: Optional[int] = None,
                       bisect_batch_on_function_error: Optional[bool] = None,
                       document_db_event_source_config: Optional[_lambda_.EventSourceMappingDocumentDbEventSourceConfigArgs] = None,
                       provisioned_poller_config: Optional[_lambda_.EventSourceMappingProvisionedPollerConfigArgs] = None,
                       queues: Optional[str] = None,
                       scaling_config: Optional[_lambda_.EventSourceMappingScalingConfigArgs] = None,
                       self_managed_event_source: Optional[_lambda_.EventSourceMappingSelfManagedEventSourceArgs] = None,
                       self_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs] = None,
                       source_access_configurations: Optional[Sequence[_lambda_.EventSourceMappingSourceAccessConfigurationArgs]] = None,
                       starting_position: Optional[str] = None,
                       starting_position_timestamp: Optional[str] = None,
                       tags: Optional[Mapping[str, str]] = None,
                       topics: Optional[Sequence[str]] = None,
                       maximum_retry_attempts: Optional[int] = None)
func NewEventSourceMapping(ctx *Context, name string, args EventSourceMappingArgs, opts ...ResourceOption) (*EventSourceMapping, error)
public EventSourceMapping(string name, EventSourceMappingArgs args, CustomResourceOptions? opts = null)
public EventSourceMapping(String name, EventSourceMappingArgs args)
public EventSourceMapping(String name, EventSourceMappingArgs args, CustomResourceOptions options)
type: aws:lambda:EventSourceMapping
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args EventSourceMappingArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args EventSourceMappingArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args EventSourceMappingArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args EventSourceMappingArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args EventSourceMappingArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var eventSourceMappingResource = new Aws.Lambda.EventSourceMapping("eventSourceMappingResource", new()
{
    FunctionName = "string",
    MaximumRecordAgeInSeconds = 0,
    DestinationConfig = new Aws.Lambda.Inputs.EventSourceMappingDestinationConfigArgs
    {
        OnFailure = new Aws.Lambda.Inputs.EventSourceMappingDestinationConfigOnFailureArgs
        {
            DestinationArn = "string",
        },
    },
    MetricsConfig = new Aws.Lambda.Inputs.EventSourceMappingMetricsConfigArgs
    {
        Metrics = new[]
        {
            "string",
        },
    },
    ParallelizationFactor = 0,
    Enabled = false,
    EventSourceArn = "string",
    FilterCriteria = new Aws.Lambda.Inputs.EventSourceMappingFilterCriteriaArgs
    {
        Filters = new[]
        {
            new Aws.Lambda.Inputs.EventSourceMappingFilterCriteriaFilterArgs
            {
                Pattern = "string",
            },
        },
    },
    BatchSize = 0,
    FunctionResponseTypes = new[]
    {
        "string",
    },
    KmsKeyArn = "string",
    MaximumBatchingWindowInSeconds = 0,
    AmazonManagedKafkaEventSourceConfig = new Aws.Lambda.Inputs.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs
    {
        ConsumerGroupId = "string",
    },
    TumblingWindowInSeconds = 0,
    BisectBatchOnFunctionError = false,
    DocumentDbEventSourceConfig = new Aws.Lambda.Inputs.EventSourceMappingDocumentDbEventSourceConfigArgs
    {
        DatabaseName = "string",
        CollectionName = "string",
        FullDocument = "string",
    },
    ProvisionedPollerConfig = new Aws.Lambda.Inputs.EventSourceMappingProvisionedPollerConfigArgs
    {
        MaximumPollers = 0,
        MinimumPollers = 0,
    },
    Queues = "string",
    ScalingConfig = new Aws.Lambda.Inputs.EventSourceMappingScalingConfigArgs
    {
        MaximumConcurrency = 0,
    },
    SelfManagedEventSource = new Aws.Lambda.Inputs.EventSourceMappingSelfManagedEventSourceArgs
    {
        Endpoints = 
        {
            { "string", "string" },
        },
    },
    SelfManagedKafkaEventSourceConfig = new Aws.Lambda.Inputs.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs
    {
        ConsumerGroupId = "string",
    },
    SourceAccessConfigurations = new[]
    {
        new Aws.Lambda.Inputs.EventSourceMappingSourceAccessConfigurationArgs
        {
            Type = "string",
            Uri = "string",
        },
    },
    StartingPosition = "string",
    StartingPositionTimestamp = "string",
    Tags = 
    {
        { "string", "string" },
    },
    Topics = new[]
    {
        "string",
    },
    MaximumRetryAttempts = 0,
});
example, err := lambda.NewEventSourceMapping(ctx, "eventSourceMappingResource", &lambda.EventSourceMappingArgs{
	FunctionName:              pulumi.String("string"),
	MaximumRecordAgeInSeconds: pulumi.Int(0),
	DestinationConfig: &lambda.EventSourceMappingDestinationConfigArgs{
		OnFailure: &lambda.EventSourceMappingDestinationConfigOnFailureArgs{
			DestinationArn: pulumi.String("string"),
		},
	},
	MetricsConfig: &lambda.EventSourceMappingMetricsConfigArgs{
		Metrics: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	ParallelizationFactor: pulumi.Int(0),
	Enabled:               pulumi.Bool(false),
	EventSourceArn:        pulumi.String("string"),
	FilterCriteria: &lambda.EventSourceMappingFilterCriteriaArgs{
		Filters: lambda.EventSourceMappingFilterCriteriaFilterArray{
			&lambda.EventSourceMappingFilterCriteriaFilterArgs{
				Pattern: pulumi.String("string"),
			},
		},
	},
	BatchSize: pulumi.Int(0),
	FunctionResponseTypes: pulumi.StringArray{
		pulumi.String("string"),
	},
	KmsKeyArn:                      pulumi.String("string"),
	MaximumBatchingWindowInSeconds: pulumi.Int(0),
	AmazonManagedKafkaEventSourceConfig: &lambda.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs{
		ConsumerGroupId: pulumi.String("string"),
	},
	TumblingWindowInSeconds:    pulumi.Int(0),
	BisectBatchOnFunctionError: pulumi.Bool(false),
	DocumentDbEventSourceConfig: &lambda.EventSourceMappingDocumentDbEventSourceConfigArgs{
		DatabaseName:   pulumi.String("string"),
		CollectionName: pulumi.String("string"),
		FullDocument:   pulumi.String("string"),
	},
	ProvisionedPollerConfig: &lambda.EventSourceMappingProvisionedPollerConfigArgs{
		MaximumPollers: pulumi.Int(0),
		MinimumPollers: pulumi.Int(0),
	},
	Queues: pulumi.String("string"),
	ScalingConfig: &lambda.EventSourceMappingScalingConfigArgs{
		MaximumConcurrency: pulumi.Int(0),
	},
	SelfManagedEventSource: &lambda.EventSourceMappingSelfManagedEventSourceArgs{
		Endpoints: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	SelfManagedKafkaEventSourceConfig: &lambda.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs{
		ConsumerGroupId: pulumi.String("string"),
	},
	SourceAccessConfigurations: lambda.EventSourceMappingSourceAccessConfigurationArray{
		&lambda.EventSourceMappingSourceAccessConfigurationArgs{
			Type: pulumi.String("string"),
			Uri:  pulumi.String("string"),
		},
	},
	StartingPosition:          pulumi.String("string"),
	StartingPositionTimestamp: pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Topics: pulumi.StringArray{
		pulumi.String("string"),
	},
	MaximumRetryAttempts: pulumi.Int(0),
})
var eventSourceMappingResource = new EventSourceMapping("eventSourceMappingResource", EventSourceMappingArgs.builder()
    .functionName("string")
    .maximumRecordAgeInSeconds(0)
    .destinationConfig(EventSourceMappingDestinationConfigArgs.builder()
        .onFailure(EventSourceMappingDestinationConfigOnFailureArgs.builder()
            .destinationArn("string")
            .build())
        .build())
    .metricsConfig(EventSourceMappingMetricsConfigArgs.builder()
        .metrics("string")
        .build())
    .parallelizationFactor(0)
    .enabled(false)
    .eventSourceArn("string")
    .filterCriteria(EventSourceMappingFilterCriteriaArgs.builder()
        .filters(EventSourceMappingFilterCriteriaFilterArgs.builder()
            .pattern("string")
            .build())
        .build())
    .batchSize(0)
    .functionResponseTypes("string")
    .kmsKeyArn("string")
    .maximumBatchingWindowInSeconds(0)
    .amazonManagedKafkaEventSourceConfig(EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs.builder()
        .consumerGroupId("string")
        .build())
    .tumblingWindowInSeconds(0)
    .bisectBatchOnFunctionError(false)
    .documentDbEventSourceConfig(EventSourceMappingDocumentDbEventSourceConfigArgs.builder()
        .databaseName("string")
        .collectionName("string")
        .fullDocument("string")
        .build())
    .provisionedPollerConfig(EventSourceMappingProvisionedPollerConfigArgs.builder()
        .maximumPollers(0)
        .minimumPollers(0)
        .build())
    .queues("string")
    .scalingConfig(EventSourceMappingScalingConfigArgs.builder()
        .maximumConcurrency(0)
        .build())
    .selfManagedEventSource(EventSourceMappingSelfManagedEventSourceArgs.builder()
        .endpoints(Map.of("string", "string"))
        .build())
    .selfManagedKafkaEventSourceConfig(EventSourceMappingSelfManagedKafkaEventSourceConfigArgs.builder()
        .consumerGroupId("string")
        .build())
    .sourceAccessConfigurations(EventSourceMappingSourceAccessConfigurationArgs.builder()
        .type("string")
        .uri("string")
        .build())
    .startingPosition("string")
    .startingPositionTimestamp("string")
    .tags(Map.of("string", "string"))
    .topics("string")
    .maximumRetryAttempts(0)
    .build());
event_source_mapping_resource = aws.lambda_.EventSourceMapping("eventSourceMappingResource",
    function_name="string",
    maximum_record_age_in_seconds=0,
    destination_config={
        "on_failure": {
            "destination_arn": "string",
        },
    },
    metrics_config={
        "metrics": ["string"],
    },
    parallelization_factor=0,
    enabled=False,
    event_source_arn="string",
    filter_criteria={
        "filters": [{
            "pattern": "string",
        }],
    },
    batch_size=0,
    function_response_types=["string"],
    kms_key_arn="string",
    maximum_batching_window_in_seconds=0,
    amazon_managed_kafka_event_source_config={
        "consumer_group_id": "string",
    },
    tumbling_window_in_seconds=0,
    bisect_batch_on_function_error=False,
    document_db_event_source_config={
        "database_name": "string",
        "collection_name": "string",
        "full_document": "string",
    },
    provisioned_poller_config={
        "maximum_pollers": 0,
        "minimum_pollers": 0,
    },
    queues="string",
    scaling_config={
        "maximum_concurrency": 0,
    },
    self_managed_event_source={
        "endpoints": {
            "string": "string",
        },
    },
    self_managed_kafka_event_source_config={
        "consumer_group_id": "string",
    },
    source_access_configurations=[{
        "type": "string",
        "uri": "string",
    }],
    starting_position="string",
    starting_position_timestamp="string",
    tags={
        "string": "string",
    },
    topics=["string"],
    maximum_retry_attempts=0)
const eventSourceMappingResource = new aws.lambda.EventSourceMapping("eventSourceMappingResource", {
    functionName: "string",
    maximumRecordAgeInSeconds: 0,
    destinationConfig: {
        onFailure: {
            destinationArn: "string",
        },
    },
    metricsConfig: {
        metrics: ["string"],
    },
    parallelizationFactor: 0,
    enabled: false,
    eventSourceArn: "string",
    filterCriteria: {
        filters: [{
            pattern: "string",
        }],
    },
    batchSize: 0,
    functionResponseTypes: ["string"],
    kmsKeyArn: "string",
    maximumBatchingWindowInSeconds: 0,
    amazonManagedKafkaEventSourceConfig: {
        consumerGroupId: "string",
    },
    tumblingWindowInSeconds: 0,
    bisectBatchOnFunctionError: false,
    documentDbEventSourceConfig: {
        databaseName: "string",
        collectionName: "string",
        fullDocument: "string",
    },
    provisionedPollerConfig: {
        maximumPollers: 0,
        minimumPollers: 0,
    },
    queues: "string",
    scalingConfig: {
        maximumConcurrency: 0,
    },
    selfManagedEventSource: {
        endpoints: {
            string: "string",
        },
    },
    selfManagedKafkaEventSourceConfig: {
        consumerGroupId: "string",
    },
    sourceAccessConfigurations: [{
        type: "string",
        uri: "string",
    }],
    startingPosition: "string",
    startingPositionTimestamp: "string",
    tags: {
        string: "string",
    },
    topics: ["string"],
    maximumRetryAttempts: 0,
});
type: aws:lambda:EventSourceMapping
properties:
    amazonManagedKafkaEventSourceConfig:
        consumerGroupId: string
    batchSize: 0
    bisectBatchOnFunctionError: false
    destinationConfig:
        onFailure:
            destinationArn: string
    documentDbEventSourceConfig:
        collectionName: string
        databaseName: string
        fullDocument: string
    enabled: false
    eventSourceArn: string
    filterCriteria:
        filters:
            - pattern: string
    functionName: string
    functionResponseTypes:
        - string
    kmsKeyArn: string
    maximumBatchingWindowInSeconds: 0
    maximumRecordAgeInSeconds: 0
    maximumRetryAttempts: 0
    metricsConfig:
        metrics:
            - string
    parallelizationFactor: 0
    provisionedPollerConfig:
        maximumPollers: 0
        minimumPollers: 0
    queues: string
    scalingConfig:
        maximumConcurrency: 0
    selfManagedEventSource:
        endpoints:
            string: string
    selfManagedKafkaEventSourceConfig:
        consumerGroupId: string
    sourceAccessConfigurations:
        - type: string
          uri: string
    startingPosition: string
    startingPositionTimestamp: string
    tags:
        string: string
    topics:
        - string
    tumblingWindowInSeconds: 0
EventSourceMapping Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The EventSourceMapping resource accepts the following input properties:
- FunctionName string
- The name or the ARN of the Lambda function that will be subscribing to events.
- AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- BatchSize int
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- BisectBatchOnFunctionError bool
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- DestinationConfig EventSourceMappingDestinationConfig
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- Enabled bool
- Determines if the mapping will be enabled on creation. Defaults to true.
- EventSourceArn string
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- FilterCriteria EventSourceMappingFilterCriteria
- The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.
- FunctionResponseTypes List<string>
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- KmsKeyArn string
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- MaximumBatchingWindowInSeconds int
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- MaximumRecordAgeInSeconds int
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- MaximumRetryAttempts int
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- MetricsConfig EventSourceMappingMetricsConfig
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- ParallelizationFactor int
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- ProvisionedPollerConfig EventSourceMappingProvisionedPollerConfig
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- Queues string
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. The list must contain exactly one queue name.
- ScalingConfig EventSourceMappingScalingConfig
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- SelfManagedEventSource EventSourceMappingSelfManagedEventSource
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- SourceAccessConfigurations List<EventSourceMappingSourceAccessConfiguration>
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- StartingPosition string
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- StartingPositionTimestamp string
- A timestamp in RFC3339 format of the data record which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- Tags Dictionary<string, string>
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- Topics List<string>
- The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- TumblingWindowInSeconds int
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- FunctionName string
- The name or the ARN of the Lambda function that will be subscribing to events.
- AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- BatchSize int
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- BisectBatchOnFunctionError bool
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- DestinationConfig EventSourceMappingDestinationConfigArgs
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfigArgs
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- Enabled bool
- Determines if the mapping will be enabled on creation. Defaults to true.
- EventSourceArn string
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- FilterCriteria EventSourceMappingFilterCriteriaArgs
- The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.
- FunctionResponseTypes []string
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- KmsKeyArn string
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- MaximumBatchingWindowInSeconds int
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- MaximumRecordAgeInSeconds int
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- MaximumRetryAttempts int
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- MetricsConfig EventSourceMappingMetricsConfigArgs
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- ParallelizationFactor int
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- ProvisionedPollerConfig EventSourceMappingProvisionedPollerConfigArgs
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- Queues string
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. The list must contain exactly one queue name.
- ScalingConfig EventSourceMappingScalingConfigArgs
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- SelfManagedEventSource EventSourceMappingSelfManagedEventSourceArgs
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfigArgs
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- SourceAccessConfigurations []EventSourceMappingSourceAccessConfigurationArgs
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- StartingPosition string
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- StartingPositionTimestamp string
- A timestamp in RFC3339 format of the data record which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- Tags map[string]string
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- Topics []string
- The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- TumblingWindowInSeconds int
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- functionName String
- The name or the ARN of the Lambda function that will be subscribing to events.
- amazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- batchSize Integer
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- bisectBatchOnFunctionError Boolean
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- destinationConfig EventSourceMappingDestinationConfig
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- documentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- enabled Boolean
- Determines if the mapping will be enabled on creation. Defaults to true.
- eventSourceArn String
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filterCriteria EventSourceMappingFilterCriteria
- The criteria to use for event filtering on Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- functionResponseTypes List<String>
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kmsKeyArn String
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- maximumBatchingWindowInSeconds Integer
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximumRecordAgeInSeconds Integer
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- maximumRetryAttempts Integer
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- metricsConfig EventSourceMappingMetricsConfig
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- parallelizationFactor Integer
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- provisionedPollerConfig EventSourceMappingProvisionedPollerConfig
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- queues String
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- scalingConfig EventSourceMappingScalingConfig
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- selfManagedEventSource EventSourceMappingSelfManagedEventSource
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- selfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- sourceAccessConfigurations List<EventSourceMappingSourceAccessConfiguration>
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- startingPosition String
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- startingPositionTimestamp String
- A timestamp in RFC3339 format of the data record from which to start reading when starting_position is set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- tags Map<String,String>
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- topics List<String>
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumblingWindowInSeconds Integer
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 and 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- functionName string
- The name or the ARN of the Lambda function that will be subscribing to events.
- amazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- batchSize number
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- bisectBatchOnFunctionError boolean
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- destinationConfig EventSourceMappingDestinationConfig
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- documentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- enabled boolean
- Determines if the mapping will be enabled on creation. Defaults to true.
- eventSourceArn string
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filterCriteria EventSourceMappingFilterCriteria
- The criteria to use for event filtering on Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- functionResponseTypes string[]
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kmsKeyArn string
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- maximumBatchingWindowInSeconds number
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximumRecordAgeInSeconds number
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- maximumRetryAttempts number
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- metricsConfig EventSourceMappingMetricsConfig
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- parallelizationFactor number
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- provisionedPollerConfig EventSourceMappingProvisionedPollerConfig
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- queues string
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- scalingConfig EventSourceMappingScalingConfig
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- selfManagedEventSource EventSourceMappingSelfManagedEventSource
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- selfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- sourceAccessConfigurations EventSourceMappingSourceAccessConfiguration[]
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- startingPosition string
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- startingPositionTimestamp string
- A timestamp in RFC3339 format of the data record from which to start reading when starting_position is set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- tags {[key: string]: string}
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- topics string[]
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumblingWindowInSeconds number
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 and 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- function_name str
- The name or the ARN of the Lambda function that will be subscribing to events.
- amazon_managed_kafka_event_source_config lambda_.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- batch_size int
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- bisect_batch_on_function_error bool
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- destination_config lambda_.EventSourceMappingDestinationConfigArgs
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- document_db_event_source_config lambda_.EventSourceMappingDocumentDbEventSourceConfigArgs
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- enabled bool
- Determines if the mapping will be enabled on creation. Defaults to true.
- event_source_arn str
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filter_criteria lambda_.EventSourceMappingFilterCriteriaArgs
- The criteria to use for event filtering on Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- function_response_types Sequence[str]
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kms_key_arn str
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- maximum_batching_window_in_seconds int
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximum_record_age_in_seconds int
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- maximum_retry_attempts int
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- metrics_config lambda_.EventSourceMappingMetricsConfigArgs
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- parallelization_factor int
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- provisioned_poller_config lambda_.EventSourceMappingProvisionedPollerConfigArgs
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- queues str
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- scaling_config lambda_.EventSourceMappingScalingConfigArgs
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- self_managed_event_source lambda_.EventSourceMappingSelfManagedEventSourceArgs
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- self_managed_kafka_event_source_config lambda_.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- source_access_configurations Sequence[lambda_.EventSourceMappingSourceAccessConfigurationArgs]
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- starting_position str
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- starting_position_timestamp str
- A timestamp in RFC3339 format of the data record from which to start reading when starting_position is set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- tags Mapping[str, str]
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- topics Sequence[str]
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumbling_window_in_seconds int
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 and 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- functionName String
- The name or the ARN of the Lambda function that will be subscribing to events.
- amazonManagedKafkaEventSourceConfig Property Map
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- batchSize Number
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- bisectBatchOnFunctionError Boolean
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- destinationConfig Property Map
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- documentDbEventSourceConfig Property Map
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- enabled Boolean
- Determines if the mapping will be enabled on creation. Defaults to true.
- eventSourceArn String
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filterCriteria Property Map
- The criteria to use for event filtering on Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- functionResponseTypes List<String>
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kmsKeyArn String
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- maximumBatchingWindowInSeconds Number
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximumRecordAgeInSeconds Number
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- maximumRetryAttempts Number
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- metricsConfig Property Map
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- parallelizationFactor Number
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- provisionedPollerConfig Property Map
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- queues String
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- scalingConfig Property Map
- Scaling configuration of the event source. Only available for SQS queues. Detailed below; a combined usage sketch follows this property list.
- selfManagedEventSource Property Map
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- selfManagedKafkaEventSourceConfig Property Map
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- sourceAccessConfigurations List<Property Map>
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- startingPosition String
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- startingPositionTimestamp String
- A timestamp in RFC3339 format of the data record from which to start reading when starting_position is set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- tags Map<String>
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- topics List<String>
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumblingWindowInSeconds Number
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 and 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
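The inputs above are easier to read next to a concrete configuration. The following TypeScript sketch (the same language as the examples earlier on this page) maps an SQS queue to a function and exercises batchSize, maximumBatchingWindowInSeconds, functionResponseTypes, filterCriteria and scalingConfig. The config keys queueArn and functionName are hypothetical stand-ins for resources defined elsewhere; adjust the values to your stack.
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Hypothetical configuration values standing in for resources defined elsewhere.
const config = new pulumi.Config();
const queueArn = config.require("queueArn");         // ARN of an existing SQS queue
const functionName = config.require("functionName"); // name or ARN of an existing Lambda function

const sqsMapping = new aws.lambda.EventSourceMapping("sqsMapping", {
    eventSourceArn: queueArn,
    functionName: functionName,
    // Invoke with up to 50 messages, or after 30 seconds of buffering, whichever comes first.
    batchSize: 50,
    maximumBatchingWindowInSeconds: 30,
    // Let the function report partial batch failures instead of failing the whole batch.
    functionResponseTypes: ["ReportBatchItemFailures"],
    // Only deliver messages whose JSON body has type == "ORDER".
    filterCriteria: {
        filters: [{
            pattern: JSON.stringify({ body: { type: ["ORDER"] } }),
        }],
    },
    // Cap concurrent invocations driven by this queue (SQS only).
    scalingConfig: {
        maximumConcurrency: 20,
    },
});
Because batchSize is above 10 for an SQS source, a non-zero maximumBatchingWindowInSeconds is required, as noted in the property descriptions above.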
Outputs
All input properties are implicitly available as output properties. Additionally, the EventSourceMapping resource produces the following output properties:
- Arn string
- The event source mapping ARN.
- FunctionArn string
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModified string
- The date this resource was last modified.
- LastProcessingResult string
- The result of the last AWS Lambda invocation of your Lambda function.
- State string
- The state of the event source mapping.
- StateTransitionReason string
- The reason the event source mapping is in its current state.
- TagsAll Dictionary<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Uuid string
- The UUID of the created event source mapping.
- Arn string
- The event source mapping ARN.
- FunctionArn string
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModified string
- The date this resource was last modified.
- LastProcessingResult string
- The result of the last AWS Lambda invocation of your Lambda function.
- State string
- The state of the event source mapping.
- StateTransitionReason string
- The reason the event source mapping is in its current state.
- TagsAll map[string]string
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Uuid string
- The UUID of the created event source mapping.
- arn String
- The event source mapping ARN.
- functionArn String
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- id String
- The provider-assigned unique ID for this managed resource.
- lastModified String
- The date this resource was last modified.
- lastProcessingResult String
- The result of the last AWS Lambda invocation of your Lambda function.
- state String
- The state of the event source mapping.
- stateTransitionReason String
- The reason the event source mapping is in its current state.
- tagsAll Map<String,String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uuid String
- The UUID of the created event source mapping.
- arn string
- The event source mapping ARN.
- functionArn string
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- id string
- The provider-assigned unique ID for this managed resource.
- lastModified string
- The date this resource was last modified.
- lastProcessingResult string
- The result of the last AWS Lambda invocation of your Lambda function.
- state string
- The state of the event source mapping.
- stateTransitionReason string
- The reason the event source mapping is in its current state.
- tagsAll {[key: string]: string}
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uuid string
- The UUID of the created event source mapping.
- arn str
- The event source mapping ARN.
- function_arn str
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- id str
- The provider-assigned unique ID for this managed resource.
- last_modified str
- The date this resource was last modified.
- last_processing_result str
- The result of the last AWS Lambda invocation of your Lambda function.
- state str
- The state of the event source mapping.
- state_transition_reason str
- The reason the event source mapping is in its current state.
- tags_all Mapping[str, str]
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uuid str
- The UUID of the created event source mapping.
- arn String
- The event source mapping ARN.
- functionArn String
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- id String
- The provider-assigned unique ID for this managed resource.
- lastModified String
- The date this resource was last modified.
- lastProcessingResult String
- The result of the last AWS Lambda invocation of your Lambda function.
- state String
- The state of the event source mapping.
- stateTransitionReason String
- The reason the event source mapping is in its current state.
- tagsAll Map<String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uuid String
- The UUID of the created event source mapping.
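Continuing the SQS sketch shown after the inputs list above (it assumes the sqsMapping resource defined there), the computed outputs can be exported straight off the resource:
// Exported stack outputs; values resolve once the mapping has been created.
export const mappingUuid = sqsMapping.uuid;               // stable identifier, also used for lookup/import
export const mappingState = sqsMapping.state;             // e.g. "Enabled" or "Disabled"
export const mappingFunctionArn = sqsMapping.functionArn; // resolved function ARN (computed, unlike functionName)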
Look up Existing EventSourceMapping Resource
Get an existing EventSourceMapping resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: EventSourceMappingState, opts?: CustomResourceOptions): EventSourceMapping
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        amazon_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs] = None,
        arn: Optional[str] = None,
        batch_size: Optional[int] = None,
        bisect_batch_on_function_error: Optional[bool] = None,
        destination_config: Optional[_lambda_.EventSourceMappingDestinationConfigArgs] = None,
        document_db_event_source_config: Optional[_lambda_.EventSourceMappingDocumentDbEventSourceConfigArgs] = None,
        enabled: Optional[bool] = None,
        event_source_arn: Optional[str] = None,
        filter_criteria: Optional[_lambda_.EventSourceMappingFilterCriteriaArgs] = None,
        function_arn: Optional[str] = None,
        function_name: Optional[str] = None,
        function_response_types: Optional[Sequence[str]] = None,
        kms_key_arn: Optional[str] = None,
        last_modified: Optional[str] = None,
        last_processing_result: Optional[str] = None,
        maximum_batching_window_in_seconds: Optional[int] = None,
        maximum_record_age_in_seconds: Optional[int] = None,
        maximum_retry_attempts: Optional[int] = None,
        metrics_config: Optional[_lambda_.EventSourceMappingMetricsConfigArgs] = None,
        parallelization_factor: Optional[int] = None,
        provisioned_poller_config: Optional[_lambda_.EventSourceMappingProvisionedPollerConfigArgs] = None,
        queues: Optional[str] = None,
        scaling_config: Optional[_lambda_.EventSourceMappingScalingConfigArgs] = None,
        self_managed_event_source: Optional[_lambda_.EventSourceMappingSelfManagedEventSourceArgs] = None,
        self_managed_kafka_event_source_config: Optional[_lambda_.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs] = None,
        source_access_configurations: Optional[Sequence[_lambda_.EventSourceMappingSourceAccessConfigurationArgs]] = None,
        starting_position: Optional[str] = None,
        starting_position_timestamp: Optional[str] = None,
        state: Optional[str] = None,
        state_transition_reason: Optional[str] = None,
        tags: Optional[Mapping[str, str]] = None,
        tags_all: Optional[Mapping[str, str]] = None,
        topics: Optional[Sequence[str]] = None,
        tumbling_window_in_seconds: Optional[int] = None,
        uuid: Optional[str] = None) -> EventSourceMapping
func GetEventSourceMapping(ctx *Context, name string, id IDInput, state *EventSourceMappingState, opts ...ResourceOption) (*EventSourceMapping, error)
public static EventSourceMapping Get(string name, Input<string> id, EventSourceMappingState? state, CustomResourceOptions? opts = null)
public static EventSourceMapping get(String name, Output<String> id, EventSourceMappingState state, CustomResourceOptions options)
resources:
  _:
    type: aws:lambda:EventSourceMapping
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
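As a usage sketch of the TypeScript lookup signature above: the resource ID passed to get is the event source mapping's UUID. The config key existingMappingUuid and the logical name "existing" below are hypothetical.
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// UUID of an event source mapping that already exists in the target account (hypothetical config value).
const config = new pulumi.Config();
const existingUuid = config.require("existingMappingUuid");

// Reads the existing mapping's state into this program without creating or modifying it.
const existing = aws.lambda.EventSourceMapping.get("existing", existingUuid);

export const existingFunctionArn = existing.functionArn;
export const existingBatchSize = existing.batchSize;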
- AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- Arn string
- The event source mapping ARN.
- BatchSize int
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- BisectBatchOnFunctionError bool
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- DestinationConfig EventSourceMappingDestinationConfig
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- Enabled bool
- Determines if the mapping will be enabled on creation. Defaults to true.
- EventSourceArn string
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- FilterCriteria EventSourceMappingFilterCriteria
- The criteria to use for event filtering on Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- FunctionArn string
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- FunctionName string
- The name or the ARN of the Lambda function that will be subscribing to events.
- FunctionResponseTypes List<string>
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- KmsKeyArn string
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- LastModified string
- The date this resource was last modified.
- LastProcessingResult string
- The result of the last AWS Lambda invocation of your Lambda function.
- MaximumBatchingWindowInSeconds int
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- MaximumRecordAgeInSeconds int
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- MaximumRetryAttempts int
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- MetricsConfig EventSourceMappingMetricsConfig
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- ParallelizationFactor int
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- ProvisionedPollerConfig EventSourceMappingProvisionedPollerConfig
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- Queues string
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- ScalingConfig EventSourceMappingScalingConfig
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- SelfManagedEventSource EventSourceMappingSelfManagedEventSource
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- SourceAccessConfigurations List<EventSourceMappingSourceAccessConfiguration>
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- StartingPosition string
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- StartingPositionTimestamp string
- A timestamp in RFC3339 format of the data record from which to start reading when starting_position is set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- State string
- The state of the event source mapping.
- StateTransitionReason string
- The reason the event source mapping is in its current state.
- Tags Dictionary<string, string>
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TagsAll Dictionary<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Topics List<string>
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- TumblingWindowInSeconds int
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 and 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- Uuid string
- The UUID of the created event source mapping.
- AmazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- Arn string
- The event source mapping ARN.
- BatchSize int
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- BisectBatchOnFunctionError bool
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
- DestinationConfig EventSourceMappingDestinationConfigArgs
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
- DocumentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfigArgs
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
- Enabled bool
- Determines if the mapping will be enabled on creation. Defaults to true.
- EventSourceArn string
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- FilterCriteria EventSourceMappingFilterCriteriaArgs
- The criteria to use for event filtering on Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- FunctionArn string
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- FunctionName string
- The name or the ARN of the Lambda function that will be subscribing to events.
- FunctionResponseTypes []string
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- KmsKeyArn string
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- LastModified string
- The date this resource was last modified.
- LastProcessingResult string
- The result of the last AWS Lambda invocation of your Lambda function.
- MaximumBatchingWindowInSeconds int
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- MaximumRecordAgeInSeconds int
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
- MaximumRetryAttempts int
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
- MetricsConfig EventSourceMappingMetricsConfigArgs
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
- ParallelizationFactor int
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
- ProvisionedPollerConfig EventSourceMappingProvisionedPollerConfigArgs
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
- Queues string
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- ScalingConfig EventSourceMappingScalingConfigArgs
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- SelfManagedEventSource EventSourceMappingSelfManagedEventSourceArgs
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
- SelfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfigArgs
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- SourceAccessConfigurations []EventSourceMappingSourceAccessConfigurationArgs
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- StartingPosition string
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- StartingPositionTimestamp string
- A timestamp in RFC3339 format of the data record from which to start reading when starting_position is set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- State string
- The state of the event source mapping.
- StateTransitionReason string
- The reason the event source mapping is in its current state.
- Tags map[string]string
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TagsAll map[string]string
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Topics []string
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- TumblingWindowInSeconds int
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 and 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- Uuid string
- The UUID of the created event source mapping.
- amazonManaged EventKafka Event Source Config Source Mapping Amazon Managed Kafka Event Source Config 
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- arn String
- The event source mapping ARN.
- batchSize Integer
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100for DynamoDB, Kinesis, MQ and MSK,10for SQS.
- bisectBatch BooleanOn Function Error 
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
 
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to 
- destinationConfig EventSource Mapping Destination Config 
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
 
- documentDb EventEvent Source Config Source Mapping Document Db Event Source Config 
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
 
- enabled Boolean
- Determines if the mapping will be enabled on creation. Defaults to true.
- eventSource StringArn 
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filterCriteria EventSource Mapping Filter Criteria 
- The criteria to use for event filtering Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below.
- functionArn String
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_nameabove.)
- functionName String
- The name or the ARN of the Lambda function that will be subscribing to events.
- functionResponse List<String>Types 
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kmsKey StringArn 
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- lastModified String
- The date this resource was last modified.
- lastProcessing StringResult 
- The result of the last AWS Lambda invocation of your Lambda function.
- maximumBatching IntegerWindow In Seconds 
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_secondsexpires orbatch_sizehas been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximumRecord IntegerAge In Seconds 
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).
 
- maximumRetry IntegerAttempts 
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
 
- metricsConfig EventSource Mapping Metrics Config 
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
 
- parallelizationFactor Integer
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
 
- provisionedPoller EventConfig Source Mapping Provisioned Poller Config 
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
 
- queues String
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. The list must contain exactly one queue name.
- scalingConfig EventSource Mapping Scaling Config 
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- selfManaged EventEvent Source Source Mapping Self Managed Event Source 
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
 
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include 
- selfManaged EventKafka Event Source Config Source Mapping Self Managed Kafka Event Source Config 
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- sourceAccess List<EventConfigurations Source Mapping Source Access Configuration> 
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- startingPosition String
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- startingPositionTimestamp String
- A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- state String
- The state of the event source mapping.
- stateTransitionReason String
- The reason the event source mapping is in its current state.
- tags Map<String,String>
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll Map<String,String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- topics List<String>
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumblingWindowInSeconds Integer
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- uuid String
- The UUID of the created event source mapping.
- amazonManagedKafkaEventSourceConfig EventSourceMappingAmazonManagedKafkaEventSourceConfig
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- arn string
- The event source mapping ARN.
- batchSize number
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- bisectBatchOnFunctionError boolean
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
 
- destinationConfig EventSourceMappingDestinationConfig
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
 
- documentDbEventSourceConfig EventSourceMappingDocumentDbEventSourceConfig
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
 
- enabled boolean
- Determines if the mapping will be enabled on creation. Defaults to true.
- eventSourceArn string
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filterCriteria EventSourceMappingFilterCriteria
- The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- functionArn string
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- functionName string
- The name or the ARN of the Lambda function that will be subscribing to events.
- functionResponseTypes string[]
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kmsKeyArn string
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- lastModified string
- The date this resource was last modified.
- lastProcessingResult string
- The result of the last AWS Lambda invocation of your Lambda function.
- maximumBatchingWindowInSeconds number
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximumRecordAgeInSeconds number
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).

- maximumRetryAttempts number
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
 
- metricsConfig EventSourceMappingMetricsConfig
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
 
- parallelizationFactor number
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
 
- provisionedPollerConfig EventSourceMappingProvisionedPollerConfig
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
 
- queues string
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- scalingConfig EventSourceMappingScalingConfig
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- selfManagedEventSource EventSourceMappingSelfManagedEventSource
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
 
- selfManagedKafkaEventSourceConfig EventSourceMappingSelfManagedKafkaEventSourceConfig
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- sourceAccessConfigurations EventSourceMappingSourceAccessConfiguration[]
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- startingPosition string
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- startingPositionTimestamp string
- A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- state string
- The state of the event source mapping.
- stateTransitionReason string
- The reason the event source mapping is in its current state.
- tags {[key: string]: string}
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll {[key: string]: string}
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- topics string[]
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumblingWindowInSeconds number
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- uuid string
- The UUID of the created event source mapping.
- amazon_managed_kafka_event_source_config lambda_.EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- arn str
- The event source mapping ARN.
- batch_size int
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- bisect_batch_on_function_error bool
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
 
- destination_config lambda_.EventSourceMappingDestinationConfigArgs
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
 
- document_db_event_source_config lambda_.EventSourceMappingDocumentDbEventSourceConfigArgs
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
 
- enabled bool
- Determines if the mapping will be enabled on creation. Defaults to true.
- event_source_arn str
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filter_criteria lambda_.EventSourceMappingFilterCriteriaArgs
- The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- function_arn str
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- function_name str
- The name or the ARN of the Lambda function that will be subscribing to events.
- function_response_types Sequence[str]
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kms_key_arn str
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- last_modified str
- The date this resource was last modified.
- last_processing_result str
- The result of the last AWS Lambda invocation of your Lambda function.
- maximum_batching_window_in_seconds int
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximum_record_age_in_seconds int
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).

- maximum_retry_attempts int
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
 
- metrics_config lambda_.EventSourceMappingMetricsConfigArgs
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
 
- parallelization_factor int
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
 
- provisioned_poller_config lambda_.EventSourceMappingProvisionedPollerConfigArgs
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
 
- queues str
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- scaling_config lambda_.EventSourceMappingScalingConfigArgs
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- self_managed_event_source lambda_.EventSourceMappingSelfManagedEventSourceArgs
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
 
- self_managed_kafka_event_source_config lambda_.EventSourceMappingSelfManagedKafkaEventSourceConfigArgs
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- source_access_configurations Sequence[lambda_.EventSourceMappingSourceAccessConfigurationArgs]
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- starting_position str
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- starting_position_timestamp str
- A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- state str
- The state of the event source mapping.
- state_transition_reason str
- The reason the event source mapping is in its current state.
- tags Mapping[str, str]
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tags_all Mapping[str, str]
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- topics Sequence[str]
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumbling_window_in_seconds int
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- uuid str
- The UUID of the created event source mapping.
- amazonManagedKafkaEventSourceConfig Property Map
- Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below.
- arn String
- The event source mapping ARN.
- batchSize Number
- The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to 100 for DynamoDB, Kinesis, MQ and MSK, 10 for SQS.
- bisectBatchOnFunctionError Boolean
- (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to false.
 
- destinationConfig Property Map
- (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below.
 
- documentDbEventSourceConfig Property Map
- (Optional) Configuration settings for a DocumentDB event source. Detailed below.
 
- enabled Boolean
- Determines if the mapping will be enabled on creation. Defaults to true.
- eventSourceArn String
- The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source.
- filterCriteria Property Map
- The criteria to use for event filtering of Kinesis stream, DynamoDB stream, and SQS queue event sources. Detailed below.
- functionArn String
- The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from function_name above.)
- functionName String
- The name or the ARN of the Lambda function that will be subscribing to events.
- functionResponseTypes List<String>
- A list of current response type enums applied to the event source mapping for AWS Lambda checkpointing. Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: ReportBatchItemFailures.
- kmsKeyArn String
- The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria.
- lastModified String
- The date this resource was last modified.
- lastProcessingResult String
- The result of the last AWS Lambda invocation of your Lambda function.
- maximumBatchingWindowInSeconds Number
- The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either maximum_batching_window_in_seconds expires or batch_size has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues.
- maximumRecordAgeInSeconds Number
- (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive).

- maximumRetryAttempts Number
- (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000.
 
- metricsConfig Property Map
- (Optional) CloudWatch metrics configuration of the event source. Only available for stream sources (DynamoDB and Kinesis) and SQS queues. Detailed below.
 
- parallelizationFactor Number
- (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10.
 
- provisionedPollerConfig Property Map
- (Optional) Event poller configuration for the event source. Only valid for Amazon MSK or self-managed Apache Kafka sources. Detailed below.
 
- queues String
- The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. Exactly one queue name must be specified.
- scalingConfig Property Map
- Scaling configuration of the event source. Only available for SQS queues. Detailed below.
- selfManagedEventSource Property Map
- (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include source_access_configuration. Detailed below.
 
- selfManagedKafkaEventSourceConfig Property Map
- Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
- sourceAccessConfigurations List<Property Map>
- For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include self_managed_event_source. Detailed below.
- startingPosition String
- The position in the stream where AWS Lambda should start reading. Must be one of AT_TIMESTAMP (Kinesis only), LATEST or TRIM_HORIZON if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the AWS DynamoDB Streams API Reference and AWS Kinesis API Reference.
- startingPositionTimestamp String
- A timestamp in RFC3339 format of the data record from which to start reading when using starting_position set to AT_TIMESTAMP. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen.
- state String
- The state of the event source mapping.
- stateTransitionReason String
- The reason the event source mapping is in its current state.
- tags Map<String>
- Map of tags to assign to the object. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll Map<String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- topics List<String>
- The names of the Kafka topics. Only available for MSK sources. A single topic name must be specified.
- tumblingWindowInSeconds Number
- The duration in seconds of a processing window for AWS Lambda streaming analytics. The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis).
- uuid String
- The UUID of the created event source mapping.
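As a quick illustration of how several of the stream-related arguments above combine, the following TypeScript sketch tunes batching and error handling for a Kinesis source. It is not taken from the upstream examples; exampleStream and exampleFn stand in for a Kinesis stream and Lambda function defined elsewhere in your program.
import * as aws from "@pulumi/aws";
// Sketch only: exampleStream (aws.kinesis.Stream) and exampleFn (aws.lambda.Function)
// are placeholders for resources defined elsewhere.
const tuned = new aws.lambda.EventSourceMapping("tuned", {
    eventSourceArn: exampleStream.arn,
    functionName: exampleFn.arn,
    startingPosition: "LATEST",
    batchSize: 200,
    maximumBatchingWindowInSeconds: 10, // buffer up to 10 seconds or 200 records, whichever comes first
    maximumRetryAttempts: 2,            // retry a failing batch at most twice
    maximumRecordAgeInSeconds: 3600,    // skip records older than one hour
    bisectBatchOnFunctionError: true,   // split the batch in two on error to isolate bad records
    parallelizationFactor: 2,           // process two batches per shard concurrently
});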
Supporting Types
EventSourceMappingAmazonManagedKafkaEventSourceConfig, EventSourceMappingAmazonManagedKafkaEventSourceConfigArgs                  
- ConsumerGroupId string
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.
- ConsumerGroupId string
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.
- consumerGroupId String
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.
- consumerGroupId string
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.
- consumer_group_id str
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.
- consumerGroupId String
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See AmazonManagedKafkaEventSourceConfig Syntax.
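To make the consumer group ID explicit on an MSK mapping, a minimal TypeScript sketch could look like the following; exampleMskCluster and exampleFn are placeholders for resources defined elsewhere in the program.
import * as aws from "@pulumi/aws";
// Sketch only: exampleMskCluster and exampleFn are placeholders.
const mskMapping = new aws.lambda.EventSourceMapping("mskMapping", {
    eventSourceArn: exampleMskCluster.arn,
    functionName: exampleFn.arn,
    topics: ["orders"],
    startingPosition: "TRIM_HORIZON",
    amazonManagedKafkaEventSourceConfig: {
        consumerGroupId: "orders-consumer-group",
    },
});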
EventSourceMappingDestinationConfig, EventSourceMappingDestinationConfigArgs          
- OnFailure EventSourceMappingDestinationConfigOnFailure
- The destination configuration for failed invocations. Detailed below.
- OnFailure EventSourceMappingDestinationConfigOnFailure
- The destination configuration for failed invocations. Detailed below.
- onFailure EventSourceMappingDestinationConfigOnFailure
- The destination configuration for failed invocations. Detailed below.
- onFailure EventSourceMappingDestinationConfigOnFailure
- The destination configuration for failed invocations. Detailed below.
- on_failure lambda_.EventSourceMappingDestinationConfigOnFailure
- The destination configuration for failed invocations. Detailed below.
- onFailure Property Map
- The destination configuration for failed invocations. Detailed below.
EventSourceMappingDestinationConfigOnFailure, EventSourceMappingDestinationConfigOnFailureArgs              
- DestinationArn string
- The Amazon Resource Name (ARN) of the destination resource.
- DestinationArn string
- The Amazon Resource Name (ARN) of the destination resource.
- destinationArn String
- The Amazon Resource Name (ARN) of the destination resource.
- destinationArn string
- The Amazon Resource Name (ARN) of the destination resource.
- destination_arn str
- The Amazon Resource Name (ARN) of the destination resource.
- destinationArn String
- The Amazon Resource Name (ARN) of the destination resource.
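For example, records from failed invocations on a stream source can be routed to an SQS queue. A minimal TypeScript sketch, assuming exampleStream, exampleFn, and deadLetterQueue are defined elsewhere in the program:
import * as aws from "@pulumi/aws";
// Sketch only: exampleStream, exampleFn, and deadLetterQueue are placeholders.
const withOnFailure = new aws.lambda.EventSourceMapping("withOnFailure", {
    eventSourceArn: exampleStream.arn,
    functionName: exampleFn.arn,
    startingPosition: "LATEST",
    destinationConfig: {
        onFailure: {
            destinationArn: deadLetterQueue.arn, // records from failed invocations are sent here
        },
    },
});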
EventSourceMappingDocumentDbEventSourceConfig, EventSourceMappingDocumentDbEventSourceConfigArgs                
- DatabaseName string
- The name of the database to consume within the DocumentDB cluster.
- CollectionName string
- The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.
- FullDocument string
- Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.
- DatabaseName string
- The name of the database to consume within the DocumentDB cluster.
- CollectionName string
- The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.
- FullDocument string
- Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.
- databaseName String
- The name of the database to consume within the DocumentDB cluster.
- collectionName String
- The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.
- fullDocument String
- Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.
- databaseName string
- The name of the database to consume within the DocumentDB cluster.
- collectionName string
- The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.
- fullDocument string
- Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.
- database_name str
- The name of the database to consume within the DocumentDB cluster.
- collection_name str
- The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.
- full_document str
- Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.
- databaseName String
- The name of the database to consume within the DocumentDB cluster.
- collectionName String
- The name of the collection to consume within the database. If you do not specify a collection, Lambda consumes all collections.
- fullDocument String
- Determines what DocumentDB sends to your event stream during document update operations. If set to UpdateLookup, DocumentDB sends a delta describing the changes, along with a copy of the entire document. Otherwise, DocumentDB sends only a partial document that contains the changes. Valid values: UpdateLookup, Default.
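A minimal TypeScript sketch of the DocumentDB block, scoping the mapping to one database and collection; exampleDocdbCluster and exampleFn are placeholders, and a real mapping typically also needs credentials supplied through sourceAccessConfigurations.
import * as aws from "@pulumi/aws";
// Sketch only: exampleDocdbCluster and exampleFn are placeholders.
const docdbMapping = new aws.lambda.EventSourceMapping("docdbMapping", {
    eventSourceArn: exampleDocdbCluster.arn,
    functionName: exampleFn.arn,
    startingPosition: "LATEST",
    documentDbEventSourceConfig: {
        databaseName: "appdb",
        collectionName: "events",
        fullDocument: "UpdateLookup", // send the delta plus a copy of the full document
    },
});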
EventSourceMappingFilterCriteria, EventSourceMappingFilterCriteriaArgs          
- Filters List<EventSourceMappingFilterCriteriaFilter>
- A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.
- Filters []EventSourceMappingFilterCriteriaFilter
- A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.
- filters List<EventSourceMappingFilterCriteriaFilter>
- A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.
- filters EventSourceMappingFilterCriteriaFilter[]
- A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.
- filters Sequence[lambda_.EventSourceMappingFilterCriteriaFilter]
- A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.
- filters List<Property Map>
- A set of up to 5 filters. If an event satisfies at least one, Lambda sends the event to the function or adds it to the next batch. Detailed below.
EventSourceMappingFilterCriteriaFilter, EventSourceMappingFilterCriteriaFilterArgs            
- Pattern string
- A filter pattern up to 4096 characters. See Filter Rule Syntax.
- Pattern string
- A filter pattern up to 4096 characters. See Filter Rule Syntax.
- pattern String
- A filter pattern up to 4096 characters. See Filter Rule Syntax.
- pattern string
- A filter pattern up to 4096 characters. See Filter Rule Syntax.
- pattern str
- A filter pattern up to 4096 characters. See Filter Rule Syntax.
- pattern String
- A filter pattern up to 4096 characters. See Filter Rule Syntax.
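Putting the two filter blocks together, the following TypeScript sketch only forwards SQS messages whose JSON body has a status of "ERROR"; exampleQueue and exampleFn are placeholders for resources defined elsewhere.
import * as aws from "@pulumi/aws";
// Sketch only: exampleQueue and exampleFn are placeholders.
const filtered = new aws.lambda.EventSourceMapping("filtered", {
    eventSourceArn: exampleQueue.arn,
    functionName: exampleFn.arn,
    filterCriteria: {
        filters: [{
            // The pattern is a JSON string; this one matches messages whose body contains {"status": "ERROR"}.
            pattern: JSON.stringify({ body: { status: ["ERROR"] } }),
        }],
    },
});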
EventSourceMappingMetricsConfig, EventSourceMappingMetricsConfigArgs          
- Metrics List<string>
- A list containing the metrics to be produced by the event source mapping. Valid values: EventCount.
- Metrics []string
- A list containing the metrics to be produced by the event source mapping. Valid values: EventCount.
- metrics List<String>
- A list containing the metrics to be produced by the event source mapping. Valid values: EventCount.
- metrics string[]
- A list containing the metrics to be produced by the event source mapping. Valid values: EventCount.
- metrics Sequence[str]
- A list containing the metrics to be produced by the event source mapping. Valid values: EventCount.
- metrics List<String>
- A list containing the metrics to be produced by the event source mapping. Valid values: EventCount.
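For instance, enabling the EventCount metric on a mapping is a one-line block, as in this TypeScript sketch (exampleQueue and exampleFn are placeholders):
import * as aws from "@pulumi/aws";
// Sketch only: exampleQueue and exampleFn are placeholders.
const metered = new aws.lambda.EventSourceMapping("metered", {
    eventSourceArn: exampleQueue.arn,
    functionName: exampleFn.arn,
    metricsConfig: {
        metrics: ["EventCount"], // emit the per-mapping EventCount CloudWatch metric
    },
});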
EventSourceMappingProvisionedPollerConfig, EventSourceMappingProvisionedPollerConfigArgs            
- MaximumPollers int
- The maximum number of event pollers this event source can scale up to. The range is between 1 and 2000.
- MinimumPollers int
- The minimum number of event pollers this event source can scale down to. The range is between 1 and 200.
- MaximumPollers int
- The maximum number of event pollers this event source can scale up to. The range is between 1 and 2000.
- MinimumPollers int
- The minimum number of event pollers this event source can scale down to. The range is between 1 and 200.
- maximumPollers Integer
- The maximum number of event pollers this event source can scale up to. The range is between 1 and 2000.
- minimumPollers Integer
- The minimum number of event pollers this event source can scale down to. The range is between 1 and 200.
- maximumPollers number
- The maximum number of event pollers this event source can scale up to. The range is between 1 and 2000.
- minimumPollers number
- The minimum number of event pollers this event source can scale down to. The range is between 1 and 200.
- maximum_pollers int
- The maximum number of event pollers this event source can scale up to. The range is between 1 and 2000.
- minimum_pollers int
- The minimum number of event pollers this event source can scale down to. The range is between 1 and 200.
- maximumPollers Number
- The maximum number of event pollers this event source can scale up to. The range is between 1 and 2000.
- minimumPollers Number
- The minimum number of event pollers this event source can scale down to. The range is between 1 and 200.
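A TypeScript sketch of provisioned poller scaling for an MSK source follows; exampleMskCluster and exampleFn are placeholders for resources defined elsewhere.
import * as aws from "@pulumi/aws";
// Sketch only: exampleMskCluster and exampleFn are placeholders.
const provisioned = new aws.lambda.EventSourceMapping("provisioned", {
    eventSourceArn: exampleMskCluster.arn,
    functionName: exampleFn.arn,
    topics: ["orders"],
    startingPosition: "TRIM_HORIZON",
    provisionedPollerConfig: {
        minimumPollers: 2,  // never scale below two pollers
        maximumPollers: 10, // cap the poller fleet at ten
    },
});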
EventSourceMappingScalingConfig, EventSourceMappingScalingConfigArgs          
- MaximumConcurrency int
- Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be greater than or equal to 2. See Configuring maximum concurrency for Amazon SQS event sources. You need to raise a Service Quota Ticket to increase the concurrency beyond 1000.
- MaximumConcurrency int
- Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be greater than or equal to 2. See Configuring maximum concurrency for Amazon SQS event sources. You need to raise a Service Quota Ticket to increase the concurrency beyond 1000.
- maximumConcurrency Integer
- Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be greater than or equal to 2. See Configuring maximum concurrency for Amazon SQS event sources. You need to raise a Service Quota Ticket to increase the concurrency beyond 1000.
- maximumConcurrency number
- Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be greater than or equal to 2. See Configuring maximum concurrency for Amazon SQS event sources. You need to raise a Service Quota Ticket to increase the concurrency beyond 1000.
- maximum_concurrency int
- Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be greater than or equal to 2. See Configuring maximum concurrency for Amazon SQS event sources. You need to raise a Service Quota Ticket to increase the concurrency beyond 1000.
- maximumConcurrency Number
- Limits the number of concurrent instances that the Amazon SQS event source can invoke. Must be greater than or equal to 2. See Configuring maximum concurrency for Amazon SQS event sources. You need to raise a Service Quota Ticket to increase the concurrency beyond 1000.
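For example, to cap how many concurrent function instances an SQS queue can drive, a TypeScript sketch (exampleQueue and exampleFn are placeholders) would be:
import * as aws from "@pulumi/aws";
// Sketch only: exampleQueue and exampleFn are placeholders.
const capped = new aws.lambda.EventSourceMapping("capped", {
    eventSourceArn: exampleQueue.arn,
    functionName: exampleFn.arn,
    scalingConfig: {
        maximumConcurrency: 50, // at most 50 concurrent invocations from this queue
    },
});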
EventSourceMappingSelfManagedEventSource, EventSourceMappingSelfManagedEventSourceArgs              
- Endpoints Dictionary<string, string>
- A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma-separated list of broker endpoints.
- Endpoints map[string]string
- A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma-separated list of broker endpoints.
- endpoints Map<String,String>
- A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma-separated list of broker endpoints.
- endpoints {[key: string]: string}
- A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma-separated list of broker endpoints.
- endpoints Mapping[str, str]
- A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma-separated list of broker endpoints.
- endpoints Map<String>
- A map of endpoints for the self managed source. For Kafka self-managed sources, the key should be KAFKA_BOOTSTRAP_SERVERS and the value should be a string with a comma-separated list of broker endpoints.
EventSourceMappingSelfManagedKafkaEventSourceConfig, EventSourceMappingSelfManagedKafkaEventSourceConfigArgs                  
- ConsumerGroupId string
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.
- ConsumerGroupId string
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.
- consumerGroupId String
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.
- consumerGroupId string
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.
- consumer_group_id str
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.
- consumerGroupId String
- A Kafka consumer group ID between 1 and 200 characters for use when creating this event source mapping. If one is not specified, this value will be automatically generated. See SelfManagedKafkaEventSourceConfig Syntax.
EventSourceMappingSourceAccessConfiguration, EventSourceMappingSourceAccessConfigurationArgs            
- Type string
- The type of authentication protocol, VPC components, or virtual host for your event source. For valid values, refer to the AWS documentation.
- Uri string
- The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.
- Type string
- The type of authentication protocol, VPC components, or virtual host for your event source. For valid values, refer to the AWS documentation.
- Uri string
- The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.
- type String
- The type of authentication protocol, VPC components, or virtual host for your event source. For valid values, refer to the AWS documentation.
- uri String
- The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.
- type string
- The type of authentication protocol, VPC components, or virtual host for your event source. For valid values, refer to the AWS documentation.
- uri string
- The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.
- type str
- The type of authentication protocol, VPC components, or virtual host for your event source. For valid values, refer to the AWS documentation.
- uri str
- The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.
- type String
- The type of authentication protocol, VPC components, or virtual host for your event source. For valid values, refer to the AWS documentation.
- uri String
- The URI for this configuration. For type VPC_SUBNET the value should be subnet:subnet_id where subnet_id is the value you would find in an aws.ec2.Subnet resource's id attribute. For type VPC_SECURITY_GROUP the value should be security_group:security_group_id where security_group_id is the value you would find in an aws.ec2.SecurityGroup resource's id attribute.
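Taken together, the self-managed Kafka blocks above might be combined as in the following TypeScript sketch. The broker addresses, subnet ID, and security group ID are placeholders, as is exampleFn.
import * as aws from "@pulumi/aws";
// Sketch only: broker endpoints, subnet/security group IDs, and exampleFn are placeholders.
const selfManagedKafka = new aws.lambda.EventSourceMapping("selfManagedKafka", {
    functionName: exampleFn.arn,
    topics: ["orders"],
    startingPosition: "TRIM_HORIZON",
    selfManagedEventSource: {
        endpoints: {
            KAFKA_BOOTSTRAP_SERVERS: "kafka1.example.com:9092,kafka2.example.com:9092",
        },
    },
    selfManagedKafkaEventSourceConfig: {
        consumerGroupId: "orders-consumer-group",
    },
    sourceAccessConfigurations: [
        { type: "VPC_SUBNET", uri: "subnet:subnet-0123456789abcdef0" },
        { type: "VPC_SECURITY_GROUP", uri: "security_group:sg-0123456789abcdef0" },
    ],
});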
Import
Using pulumi import, import Lambda event source mappings using the UUID (event source mapping identifier). For example:
$ pulumi import aws:lambda/eventSourceMapping:EventSourceMapping event_source_mapping 12345kxodurf3443
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.