
/**
* Kinesis Analytics Flink
*
 * ---
 *
* The APIs of higher level constructs in this module are experimental and under active development.
* They are subject to non-backward compatible changes or removal in any future version. These are
* not subject to the Semantic Versioning model and breaking changes will be
* announced in the release notes. This means that while you may use them, you may need to update
* your source code when upgrading to a newer version of this package.
 *
 * This package provides constructs for creating Kinesis Analytics Flink
 * applications. To learn more about using managed Flink applications, see
 * the AWS developer guide.
 *
 * Creating Flink Applications
 *
 * To create a new Flink application, use the Application construct:
 *
 * import software.amazon.awscdk.core.*;
 * import software.amazon.awscdk.services.kinesis.analytics.flink.*;
 * import software.amazon.awscdk.services.cloudwatch.*;
 *
 * App app = new App();
 * Stack stack = new Stack(app, "FlinkAppTest");
 *
 * // Bundle the local "code-asset" directory (containing the application jar) as an S3 asset
 * Application flinkApp = Application.Builder.create(stack, "App")
 *         .code(ApplicationCode.fromAsset("code-asset"))
 *         .runtime(Runtime.FLINK_1_11)
 *         .build();
 *
 * // Alarm on the application's fullRestarts metric
 * Alarm.Builder.create(stack, "Alarm")
 *         .metric(flinkApp.metricFullRestarts())
 *         .evaluationPeriods(1)
 *         .threshold(3)
 *         .build();
 *
 * app.synth();
*
*
 * The code property can use fromAsset, as shown above, to upload a local jar
 * file to S3 as an asset, or fromBucket to reference a file that already
 * exists in S3.
*
*
 * import software.amazon.awscdk.services.s3.*;
 * import software.amazon.awscdk.services.s3.assets.*;
 * import software.amazon.awscdk.core.*;
 * import software.amazon.awscdk.services.kinesis.analytics.flink.*;
 *
 * App app = new App();
 * Stack stack = new Stack(app, "FlinkAppCodeFromBucketTest");
 *
 * // Upload the local code directory as an S3 asset, then reference it by bucket and key
 * Asset asset = Asset.Builder.create(stack, "CodeAsset")
 *         .path("code-asset")
 *         .build();
 * IBucket bucket = asset.getBucket();
 * String fileKey = asset.getS3ObjectKey();
 *
 * Application.Builder.create(stack, "App")
 *         .code(ApplicationCode.fromBucket(bucket, fileKey))
 *         .runtime(Runtime.FLINK_1_11)
 *         .build();
 *
 * app.synth();
*
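 * If the jar already lives in an existing bucket, you can import that bucket and
 * pass its name and object key directly. A minimal sketch, reusing the stack
 * variable from the example above and assuming a bucket named "my-flink-artifacts"
 * and an object key "my-app.jar" (both hypothetical):
 *
 * import software.amazon.awscdk.services.s3.*;
 * import software.amazon.awscdk.services.kinesis.analytics.flink.*;
 *
 * // Import an existing bucket by name (hypothetical bucket name)
 * IBucket existingBucket = Bucket.fromBucketName(stack, "ImportedBucket", "my-flink-artifacts");
 *
 * Application.Builder.create(stack, "AppFromExistingBucket")
 *         .code(ApplicationCode.fromBucket(existingBucket, "my-app.jar")) // hypothetical object key
 *         .runtime(Runtime.FLINK_1_11)
 *         .build();
 *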
*
 * The propertyGroups property provides a way of passing arbitrary runtime
 * properties to your Flink application. You can use the
 * aws-kinesisanalytics-runtime library to retrieve these properties from
 * inside the running job, as sketched after the example below.
*
*
 * Bucket bucket;
 *
 * Application flinkApp = Application.Builder.create(this, "Application")
 *         .propertyGroups(PropertyGroups.builder()
 *                 .FlinkApplicationProperties(Map.of(
 *                         "inputStreamName", "my-input-kinesis-stream",
 *                         "outputStreamName", "my-output-kinesis-stream"))
 *                 .build())
 *         // ...
 *         .runtime(Runtime.FLINK_1_13)
 *         .code(ApplicationCode.fromBucket(bucket, "my-app.jar"))
 *         .build();
*
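 * Inside the Flink job itself, the property group defined above can be read with
 * the aws-kinesisanalytics-runtime library. A minimal sketch, assuming that
 * dependency is on the job's classpath and using the "FlinkApplicationProperties"
 * group ID from the example above:
 *
 * import com.amazonaws.services.kinesisanalytics.runtime.KinesisAnalyticsRuntime;
 * import java.util.Map;
 * import java.util.Properties;
 *
 * // Fetch all property groups configured on the application
 * Map<String, Properties> applicationProperties = KinesisAnalyticsRuntime.getApplicationProperties();
 * Properties flinkProperties = applicationProperties.get("FlinkApplicationProperties");
 * String inputStreamName = flinkProperties.getProperty("inputStreamName");
 * String outputStreamName = flinkProperties.getProperty("outputStreamName");
 *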
*
* Flink applications also have specific configuration for passing parameters
* when the Flink job starts. These include parameters for checkpointing,
* snapshotting, monitoring, and parallelism.
*
*
 * Bucket bucket;
 *
 * Application flinkApp = Application.Builder.create(this, "Application")
 *         .code(ApplicationCode.fromBucket(bucket, "my-app.jar"))
 *         .runtime(Runtime.FLINK_1_13)
 *         .checkpointingEnabled(true) // default is true
 *         .checkpointInterval(Duration.seconds(30)) // default is 1 minute
 *         .minPauseBetweenCheckpoints(Duration.seconds(10)) // default is 5 seconds
 *         .logLevel(LogLevel.ERROR) // default is INFO
 *         .metricsLevel(MetricsLevel.PARALLELISM) // default is APPLICATION
 *         .autoScalingEnabled(false) // default is true
 *         .parallelism(32) // default is 1
 *         .parallelismPerKpu(2) // default is 1
 *         .snapshotsEnabled(false) // default is true
 *         .logGroup(new LogGroup(this, "LogGroup"))
 *         .build();
*
*/
@software.amazon.jsii.Stability(software.amazon.jsii.Stability.Level.Experimental)
package software.amazon.awscdk.services.kinesis.analytics.flink;