All Downloads are FREE. Search and download functionalities are using the official Maven repository.

com.pulumi.aws.lambda.EventSourceMapping Maven / Gradle / Ivy

Go to download

A Pulumi package for creating and managing Amazon Web Services (AWS) cloud resources.

The newest version!
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.aws.lambda;

import com.pulumi.aws.Utilities;
import com.pulumi.aws.lambda.EventSourceMappingArgs;
import com.pulumi.aws.lambda.inputs.EventSourceMappingState;
import com.pulumi.aws.lambda.outputs.EventSourceMappingAmazonManagedKafkaEventSourceConfig;
import com.pulumi.aws.lambda.outputs.EventSourceMappingDestinationConfig;
import com.pulumi.aws.lambda.outputs.EventSourceMappingDocumentDbEventSourceConfig;
import com.pulumi.aws.lambda.outputs.EventSourceMappingFilterCriteria;
import com.pulumi.aws.lambda.outputs.EventSourceMappingScalingConfig;
import com.pulumi.aws.lambda.outputs.EventSourceMappingSelfManagedEventSource;
import com.pulumi.aws.lambda.outputs.EventSourceMappingSelfManagedKafkaEventSourceConfig;
import com.pulumi.aws.lambda.outputs.EventSourceMappingSourceAccessConfiguration;
import com.pulumi.core.Output;
import com.pulumi.core.annotations.Export;
import com.pulumi.core.annotations.ResourceType;
import com.pulumi.core.internal.Codegen;
import java.lang.Boolean;
import java.lang.Integer;
import java.lang.String;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;

/**
 * Provides a Lambda event source mapping. This allows Lambda functions to get events from Kinesis, DynamoDB, SQS, Amazon MQ and Managed Streaming for Apache Kafka (MSK).
 * 
 * For information about Lambda and how to use it, see [What is AWS Lambda?](http://docs.aws.amazon.com/lambda/latest/dg/welcome.html).
 * For information about event source mappings, see [CreateEventSourceMapping](http://docs.aws.amazon.com/lambda/latest/dg/API_CreateEventSourceMapping.html) in the API docs.
 * 
 * ## Example Usage
 * 
 * ### DynamoDB
 * 
 * <!--Start PulumiCodeChooser -->
 * 
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .eventSourceArn(exampleAwsDynamodbTable.streamArn())
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .startingPosition("LATEST")
 *             .tags(Map.of("Name", "dynamodb"))
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ### Kinesis * * <!--Start PulumiCodeChooser --> *
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .eventSourceArn(exampleAwsKinesisStream.arn())
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .startingPosition("LATEST")
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ### Managed Streaming for Apache Kafka (MSK) * * <!--Start PulumiCodeChooser --> *
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .eventSourceArn(exampleAwsMskCluster.arn())
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .topics("Example")
 *             .startingPosition("TRIM_HORIZON")
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ### Self Managed Apache Kafka * * <!--Start PulumiCodeChooser --> *
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import com.pulumi.aws.lambda.inputs.EventSourceMappingSelfManagedEventSourceArgs;
 * import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .topics("Example")
 *             .startingPosition("TRIM_HORIZON")
 *             .selfManagedEventSource(EventSourceMappingSelfManagedEventSourceArgs.builder()
 *                 .endpoints(Map.of("KAFKA_BOOTSTRAP_SERVERS", "kafka1.example.com:9092,kafka2.example.com:9092"))
 *                 .build())
 *             .sourceAccessConfigurations(            
 *                 EventSourceMappingSourceAccessConfigurationArgs.builder()
 *                     .type("VPC_SUBNET")
 *                     .uri("subnet:subnet-example1")
 *                     .build(),
 *                 EventSourceMappingSourceAccessConfigurationArgs.builder()
 *                     .type("VPC_SUBNET")
 *                     .uri("subnet:subnet-example2")
 *                     .build(),
 *                 EventSourceMappingSourceAccessConfigurationArgs.builder()
 *                     .type("VPC_SECURITY_GROUP")
 *                     .uri("security_group:sg-example")
 *                     .build())
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ### SQS * * <!--Start PulumiCodeChooser --> *
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .eventSourceArn(sqsQueueTest.arn())
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ### SQS with event filter * * <!--Start PulumiCodeChooser --> *
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import com.pulumi.aws.lambda.inputs.EventSourceMappingFilterCriteriaArgs;
 * import static com.pulumi.codegen.internal.Serialization.*;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .eventSourceArn(sqsQueueTest.arn())
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .filterCriteria(EventSourceMappingFilterCriteriaArgs.builder()
 *                 .filters(EventSourceMappingFilterCriteriaFilterArgs.builder()
 *                     .pattern(serializeJson(
 *                         jsonObject(
 *                             jsonProperty("body", jsonObject(
 *                                 jsonProperty("Temperature", jsonArray(jsonObject(
 *                                     jsonProperty("numeric", jsonArray(
 *                                         ">", 
 *                                         0, 
 *                                         "<=", 
 *                                         100
 *                                     ))
 *                                 ))),
 *                                 jsonProperty("Location", jsonArray("New York"))
 *                             ))
 *                         )))
 *                     .build())
 *                 .build())
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ### Amazon MQ (ActiveMQ) * * <!--Start PulumiCodeChooser --> *
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .batchSize(10)
 *             .eventSourceArn(exampleAwsMqBroker.arn())
 *             .enabled(true)
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .queues("example")
 *             .sourceAccessConfigurations(EventSourceMappingSourceAccessConfigurationArgs.builder()
 *                 .type("BASIC_AUTH")
 *                 .uri(exampleAwsSecretsmanagerSecretVersion.arn())
 *                 .build())
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ### Amazon MQ (RabbitMQ) * * <!--Start PulumiCodeChooser --> *
 * {@code
 * package generated_program;
 * 
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.aws.lambda.EventSourceMapping;
 * import com.pulumi.aws.lambda.EventSourceMappingArgs;
 * import com.pulumi.aws.lambda.inputs.EventSourceMappingSourceAccessConfigurationArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * 
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 * 
 *     public static void stack(Context ctx) {
 *         var example = new EventSourceMapping("example", EventSourceMappingArgs.builder()
 *             .batchSize(1)
 *             .eventSourceArn(exampleAwsMqBroker.arn())
 *             .enabled(true)
 *             .functionName(exampleAwsLambdaFunction.arn())
 *             .queues("example")
 *             .sourceAccessConfigurations(            
 *                 EventSourceMappingSourceAccessConfigurationArgs.builder()
 *                     .type("VIRTUAL_HOST")
 *                     .uri("/example")
 *                     .build(),
 *                 EventSourceMappingSourceAccessConfigurationArgs.builder()
 *                     .type("BASIC_AUTH")
 *                     .uri(exampleAwsSecretsmanagerSecretVersion.arn())
 *                     .build())
 *             .build());
 * 
 *     }
 * }
 * }
 * 
* <!--End PulumiCodeChooser --> * * ## Import * * Using `pulumi import`, import Lambda event source mappings using the `UUID` (event source mapping identifier). For example: * * ```sh * $ pulumi import aws:lambda/eventSourceMapping:EventSourceMapping event_source_mapping 12345kxodurf3443 * ``` * */ @ResourceType(type="aws:lambda/eventSourceMapping:EventSourceMapping") public class EventSourceMapping extends com.pulumi.resources.CustomResource { /** * Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below. * */ @Export(name="amazonManagedKafkaEventSourceConfig", refs={EventSourceMappingAmazonManagedKafkaEventSourceConfig.class}, tree="[0]") private Output amazonManagedKafkaEventSourceConfig; /** * @return Additional configuration block for Amazon Managed Kafka sources. Incompatible with "self_managed_event_source" and "self_managed_kafka_event_source_config". Detailed below. * */ public Output amazonManagedKafkaEventSourceConfig() { return this.amazonManagedKafkaEventSourceConfig; } /** * The event source mapping ARN. * */ @Export(name="arn", refs={String.class}, tree="[0]") private Output arn; /** * @return The event source mapping ARN. * */ public Output arn() { return this.arn; } /** * The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to `100` for DynamoDB, Kinesis, MQ and MSK, `10` for SQS. * */ @Export(name="batchSize", refs={Integer.class}, tree="[0]") private Output batchSize; /** * @return The largest number of records that Lambda will retrieve from your event source at the time of invocation. Defaults to `100` for DynamoDB, Kinesis, MQ and MSK, `10` for SQS. * */ public Output> batchSize() { return Codegen.optional(this.batchSize); } /** * - (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to `false`. * */ @Export(name="bisectBatchOnFunctionError", refs={Boolean.class}, tree="[0]") private Output bisectBatchOnFunctionError; /** * @return - (Optional) If the function returns an error, split the batch in two and retry. Only available for stream sources (DynamoDB and Kinesis). Defaults to `false`. * */ public Output> bisectBatchOnFunctionError() { return Codegen.optional(this.bisectBatchOnFunctionError); } /** * - (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below. * */ @Export(name="destinationConfig", refs={EventSourceMappingDestinationConfig.class}, tree="[0]") private Output destinationConfig; /** * @return - (Optional) An Amazon SQS queue, Amazon SNS topic or Amazon S3 bucket (only available for Kafka sources) destination for failed records. Only available for stream sources (DynamoDB and Kinesis) and Kafka sources (Amazon MSK and Self-managed Apache Kafka). Detailed below. * */ public Output> destinationConfig() { return Codegen.optional(this.destinationConfig); } /** * - (Optional) Configuration settings for a DocumentDB event source. Detailed below. * */ @Export(name="documentDbEventSourceConfig", refs={EventSourceMappingDocumentDbEventSourceConfig.class}, tree="[0]") private Output documentDbEventSourceConfig; /** * @return - (Optional) Configuration settings for a DocumentDB event source. Detailed below. * */ public Output> documentDbEventSourceConfig() { return Codegen.optional(this.documentDbEventSourceConfig); } /** * Determines if the mapping will be enabled on creation. Defaults to `true`. * */ @Export(name="enabled", refs={Boolean.class}, tree="[0]") private Output enabled; /** * @return Determines if the mapping will be enabled on creation. Defaults to `true`. * */ public Output> enabled() { return Codegen.optional(this.enabled); } /** * The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source. * */ @Export(name="eventSourceArn", refs={String.class}, tree="[0]") private Output eventSourceArn; /** * @return The event source ARN - this is required for Kinesis stream, DynamoDB stream, SQS queue, MQ broker, MSK cluster or DocumentDB change stream. It is incompatible with a Self Managed Kafka source. * */ public Output> eventSourceArn() { return Codegen.optional(this.eventSourceArn); } /** * The criteria to use for [event filtering](https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventfiltering.html) Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below. * */ @Export(name="filterCriteria", refs={EventSourceMappingFilterCriteria.class}, tree="[0]") private Output filterCriteria; /** * @return The criteria to use for [event filtering](https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventfiltering.html) Kinesis stream, DynamoDB stream, SQS queue event sources. Detailed below. * */ public Output> filterCriteria() { return Codegen.optional(this.filterCriteria); } /** * The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from `function_name` above.) * */ @Export(name="functionArn", refs={String.class}, tree="[0]") private Output functionArn; /** * @return The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from `function_name` above.) * */ public Output functionArn() { return this.functionArn; } /** * The name or the ARN of the Lambda function that will be subscribing to events. * */ @Export(name="functionName", refs={String.class}, tree="[0]") private Output functionName; /** * @return The name or the ARN of the Lambda function that will be subscribing to events. * */ public Output functionName() { return this.functionName; } /** * A list of current response type enums applied to the event source mapping for [AWS Lambda checkpointing](https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting). Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: `ReportBatchItemFailures`. * */ @Export(name="functionResponseTypes", refs={List.class,String.class}, tree="[0,1]") private Output> functionResponseTypes; /** * @return A list of current response type enums applied to the event source mapping for [AWS Lambda checkpointing](https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting). Only available for SQS and stream sources (DynamoDB and Kinesis). Valid values: `ReportBatchItemFailures`. * */ public Output>> functionResponseTypes() { return Codegen.optional(this.functionResponseTypes); } /** * The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria. * */ @Export(name="kmsKeyArn", refs={String.class}, tree="[0]") private Output kmsKeyArn; /** * @return The ARN of the Key Management Service (KMS) customer managed key that Lambda uses to encrypt your function's filter criteria. * */ public Output> kmsKeyArn() { return Codegen.optional(this.kmsKeyArn); } /** * The date this resource was last modified. * */ @Export(name="lastModified", refs={String.class}, tree="[0]") private Output lastModified; /** * @return The date this resource was last modified. * */ public Output lastModified() { return this.lastModified; } /** * The result of the last AWS Lambda invocation of your Lambda function. * */ @Export(name="lastProcessingResult", refs={String.class}, tree="[0]") private Output lastProcessingResult; /** * @return The result of the last AWS Lambda invocation of your Lambda function. * */ public Output lastProcessingResult() { return this.lastProcessingResult; } /** * The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either `maximum_batching_window_in_seconds` expires or `batch_size` has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues. * */ @Export(name="maximumBatchingWindowInSeconds", refs={Integer.class}, tree="[0]") private Output maximumBatchingWindowInSeconds; /** * @return The maximum amount of time to gather records before invoking the function, in seconds (between 0 and 300). Records will continue to buffer (or accumulate in the case of an SQS queue event source) until either `maximum_batching_window_in_seconds` expires or `batch_size` has been met. For streaming event sources, defaults to as soon as records are available in the stream. If the batch it reads from the stream/queue only has one record in it, Lambda only sends one record to the function. Only available for stream sources (DynamoDB and Kinesis) and SQS standard queues. * */ public Output> maximumBatchingWindowInSeconds() { return Codegen.optional(this.maximumBatchingWindowInSeconds); } /** * - (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive). * */ @Export(name="maximumRecordAgeInSeconds", refs={Integer.class}, tree="[0]") private Output maximumRecordAgeInSeconds; /** * @return - (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive). * */ public Output maximumRecordAgeInSeconds() { return this.maximumRecordAgeInSeconds; } /** * - (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000. * */ @Export(name="maximumRetryAttempts", refs={Integer.class}, tree="[0]") private Output maximumRetryAttempts; /** * @return - (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000. * */ public Output maximumRetryAttempts() { return this.maximumRetryAttempts; } /** * - (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10. * */ @Export(name="parallelizationFactor", refs={Integer.class}, tree="[0]") private Output parallelizationFactor; /** * @return - (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10. * */ public Output parallelizationFactor() { return this.parallelizationFactor; } /** * The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. The list must contain exactly one queue name. * */ @Export(name="queues", refs={String.class}, tree="[0]") private Output queues; /** * @return The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. The list must contain exactly one queue name. * */ public Output> queues() { return Codegen.optional(this.queues); } /** * Scaling configuration of the event source. Only available for SQS queues. Detailed below. * */ @Export(name="scalingConfig", refs={EventSourceMappingScalingConfig.class}, tree="[0]") private Output scalingConfig; /** * @return Scaling configuration of the event source. Only available for SQS queues. Detailed below. * */ public Output> scalingConfig() { return Codegen.optional(this.scalingConfig); } /** * - (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include `source_access_configuration`. Detailed below. * */ @Export(name="selfManagedEventSource", refs={EventSourceMappingSelfManagedEventSource.class}, tree="[0]") private Output selfManagedEventSource; /** * @return - (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include `source_access_configuration`. Detailed below. * */ public Output> selfManagedEventSource() { return Codegen.optional(this.selfManagedEventSource); } /** * Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below. * */ @Export(name="selfManagedKafkaEventSourceConfig", refs={EventSourceMappingSelfManagedKafkaEventSourceConfig.class}, tree="[0]") private Output selfManagedKafkaEventSourceConfig; /** * @return Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below. * */ public Output selfManagedKafkaEventSourceConfig() { return this.selfManagedKafkaEventSourceConfig; } /** * For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include `self_managed_event_source`. Detailed below. * */ @Export(name="sourceAccessConfigurations", refs={List.class,EventSourceMappingSourceAccessConfiguration.class}, tree="[0,1]") private Output> sourceAccessConfigurations; /** * @return For Self Managed Kafka sources, the access configuration for the source. If set, configuration must also include `self_managed_event_source`. Detailed below. * */ public Output>> sourceAccessConfigurations() { return Codegen.optional(this.sourceAccessConfigurations); } /** * The position in the stream where AWS Lambda should start reading. Must be one of `AT_TIMESTAMP` (Kinesis only), `LATEST` or `TRIM_HORIZON` if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the [AWS DynamoDB Streams API Reference](https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_GetShardIterator.html) and [AWS Kinesis API Reference](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html#Kinesis-GetShardIterator-request-ShardIteratorType). * */ @Export(name="startingPosition", refs={String.class}, tree="[0]") private Output startingPosition; /** * @return The position in the stream where AWS Lambda should start reading. Must be one of `AT_TIMESTAMP` (Kinesis only), `LATEST` or `TRIM_HORIZON` if getting events from Kinesis, DynamoDB, MSK or Self Managed Apache Kafka. Must not be provided if getting events from SQS. More information about these positions can be found in the [AWS DynamoDB Streams API Reference](https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_GetShardIterator.html) and [AWS Kinesis API Reference](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html#Kinesis-GetShardIterator-request-ShardIteratorType). * */ public Output> startingPosition() { return Codegen.optional(this.startingPosition); } /** * A timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of the data record which to start reading when using `starting_position` set to `AT_TIMESTAMP`. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen. * */ @Export(name="startingPositionTimestamp", refs={String.class}, tree="[0]") private Output startingPositionTimestamp; /** * @return A timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of the data record which to start reading when using `starting_position` set to `AT_TIMESTAMP`. If a record with this exact timestamp does not exist, the next later record is chosen. If the timestamp is older than the current trim horizon, the oldest available record is chosen. * */ public Output> startingPositionTimestamp() { return Codegen.optional(this.startingPositionTimestamp); } /** * The state of the event source mapping. * */ @Export(name="state", refs={String.class}, tree="[0]") private Output state; /** * @return The state of the event source mapping. * */ public Output state() { return this.state; } /** * The reason the event source mapping is in its current state. * */ @Export(name="stateTransitionReason", refs={String.class}, tree="[0]") private Output stateTransitionReason; /** * @return The reason the event source mapping is in its current state. * */ public Output stateTransitionReason() { return this.stateTransitionReason; } /** * Map of tags to assign to the object. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level. * */ @Export(name="tags", refs={Map.class,String.class}, tree="[0,1,1]") private Output> tags; /** * @return Map of tags to assign to the object. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level. * */ public Output>> tags() { return Codegen.optional(this.tags); } /** * A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block. * * @deprecated * Please use `tags` instead. * */ @Deprecated /* Please use `tags` instead. */ @Export(name="tagsAll", refs={Map.class,String.class}, tree="[0,1,1]") private Output> tagsAll; /** * @return A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block. * */ public Output> tagsAll() { return this.tagsAll; } /** * The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified. * */ @Export(name="topics", refs={List.class,String.class}, tree="[0,1]") private Output> topics; /** * @return The name of the Kafka topics. Only available for MSK sources. A single topic name must be specified. * */ public Output>> topics() { return Codegen.optional(this.topics); } /** * The duration in seconds of a processing window for [AWS Lambda streaming analytics](https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html#services-kinesis-windows). The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis). * */ @Export(name="tumblingWindowInSeconds", refs={Integer.class}, tree="[0]") private Output tumblingWindowInSeconds; /** * @return The duration in seconds of a processing window for [AWS Lambda streaming analytics](https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html#services-kinesis-windows). The range is between 1 second up to 900 seconds. Only available for stream sources (DynamoDB and Kinesis). * */ public Output> tumblingWindowInSeconds() { return Codegen.optional(this.tumblingWindowInSeconds); } /** * The UUID of the created event source mapping. * */ @Export(name="uuid", refs={String.class}, tree="[0]") private Output uuid; /** * @return The UUID of the created event source mapping. * */ public Output uuid() { return this.uuid; } /** * * @param name The _unique_ name of the resulting resource. */ public EventSourceMapping(java.lang.String name) { this(name, EventSourceMappingArgs.Empty); } /** * * @param name The _unique_ name of the resulting resource. * @param args The arguments to use to populate this resource's properties. */ public EventSourceMapping(java.lang.String name, EventSourceMappingArgs args) { this(name, args, null); } /** * * @param name The _unique_ name of the resulting resource. * @param args The arguments to use to populate this resource's properties. * @param options A bag of options that control this resource's behavior. */ public EventSourceMapping(java.lang.String name, EventSourceMappingArgs args, @Nullable com.pulumi.resources.CustomResourceOptions options) { super("aws:lambda/eventSourceMapping:EventSourceMapping", name, makeArgs(args, options), makeResourceOptions(options, Codegen.empty()), false); } private EventSourceMapping(java.lang.String name, Output id, @Nullable EventSourceMappingState state, @Nullable com.pulumi.resources.CustomResourceOptions options) { super("aws:lambda/eventSourceMapping:EventSourceMapping", name, state, makeResourceOptions(options, id), false); } private static EventSourceMappingArgs makeArgs(EventSourceMappingArgs args, @Nullable com.pulumi.resources.CustomResourceOptions options) { if (options != null && options.getUrn().isPresent()) { return null; } return args == null ? EventSourceMappingArgs.Empty : args; } private static com.pulumi.resources.CustomResourceOptions makeResourceOptions(@Nullable com.pulumi.resources.CustomResourceOptions options, @Nullable Output id) { var defaultOptions = com.pulumi.resources.CustomResourceOptions.builder() .version(Utilities.getVersion()) .build(); return com.pulumi.resources.CustomResourceOptions.merge(defaultOptions, options, id); } /** * Get an existing Host resource's state with the given name, ID, and optional extra * properties used to qualify the lookup. * * @param name The _unique_ name of the resulting resource. * @param id The _unique_ provider ID of the resource to lookup. * @param state * @param options Optional settings to control the behavior of the CustomResource. */ public static EventSourceMapping get(java.lang.String name, Output id, @Nullable EventSourceMappingState state, @Nullable com.pulumi.resources.CustomResourceOptions options) { return new EventSourceMapping(name, id, state, options); } }




© 2015 - 2025 Weber Informatics LLC | Privacy Policy