# kafka-batch-azure-schema-registry-source.kamelet.yaml
# ---------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ---------------------------------------------------------------------------
apiVersion: camel.apache.org/v1
kind: Kamelet
metadata:
name: kafka-batch-azure-schema-registry-source
annotations:
camel.apache.org/kamelet.support.level: "Preview"
camel.apache.org/catalog.version: "4.8.0"
camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4NCjwhLS0gR2VuZXJhdG9yOiBBZG9iZSBJbGx1c3RyYXRvciAxOS4wLjAsIFNWRyBFeHBvcnQgUGx1Zy1JbiAuIFNWRyBWZXJzaW9uOiA2LjAwIEJ1aWxkIDApICAtLT4NCjxzdmcgdmVyc2lvbj0iMS4xIiBpZD0iTGF5ZXJfMSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczp4bGluaz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94bGluayIgeD0iMHB4IiB5PSIwcHgiDQoJIHZpZXdCb3g9IjAgMCA1MDAgNTAwIiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCA1MDAgNTAwOyIgeG1sOnNwYWNlPSJwcmVzZXJ2ZSI+DQo8ZyBpZD0iWE1MSURfMV8iPg0KCTxwYXRoIGlkPSJYTUxJRF85XyIgZD0iTTMxNC44LDI2OS43Yy0xNC4yLDAtMjcsNi4zLTM1LjcsMTYuMkwyNTYuOCwyNzBjMi40LTYuNSwzLjctMTMuNiwzLjctMjAuOWMwLTcuMi0xLjMtMTQuMS0zLjYtMjAuNg0KCQlsMjIuMy0xNS43YzguNyw5LjksMjEuNCwxNi4xLDM1LjYsMTYuMWMyNi4yLDAsNDcuNi0yMS4zLDQ3LjYtNDcuNnMtMjEuMy00Ny42LTQ3LjYtNDcuNnMtNDcuNiwyMS4zLTQ3LjYsNDcuNg0KCQljMCw0LjcsMC43LDkuMiwyLDEzLjVsLTIyLjMsMTUuN2MtOS4zLTExLjYtMjIuOC0xOS42LTM4LjEtMjIuMXYtMjYuOWMyMS42LTQuNSwzNy44LTIzLjcsMzcuOC00Ni42YzAtMjYuMi0yMS4zLTQ3LjYtNDcuNi00Ny42DQoJCWMtMjYuMiwwLTQ3LjYsMjEuMy00Ny42LDQ3LjZjMCwyMi42LDE1LjgsNDEuNSwzNi45LDQ2LjN2MjcuM2MtMjguOCw1LjEtNTAuOCwzMC4yLTUwLjgsNjAuNWMwLDMwLjQsMjIuMiw1NS43LDUxLjIsNjAuNXYyOC44DQoJCWMtMjEuMyw0LjctMzcuNCwyMy43LTM3LjQsNDYuNGMwLDI2LjIsMjEuMyw0Ny42LDQ3LjYsNDcuNmMyNi4yLDAsNDcuNi0yMS4zLDQ3LjYtNDcuNmMwLTIyLjctMTYtNDEuOC0zNy40LTQ2LjR2LTI4LjgNCgkJYzE1LTIuNSwyOC4yLTEwLjQsMzcuNC0yMS44bDIyLjUsMTUuOWMtMS4yLDQuMy0xLjksOC43LTEuOSwxMy40YzAsMjYuMiwyMS4zLDQ3LjYsNDcuNiw0Ny42czQ3LjYtMjEuMyw0Ny42LTQ3LjYNCgkJQzM2Mi40LDI5MSwzNDEuMSwyNjkuNywzMTQuOCwyNjkuN3ogTTMxNC44LDE1OC40YzEyLjcsMCwyMy4xLDEwLjQsMjMuMSwyMy4xYzAsMTIuNy0xMC4zLDIzLjEtMjMuMSwyMy4xcy0yMy4xLTEwLjQtMjMuMS0yMy4xDQoJCUMyOTEuOCwxNjguOCwzMDIuMSwxNTguNCwzMTQuOCwxNTguNHogTTE3NiwxMTUuMWMwLTEyLjcsMTAuMy0yMy4xLDIzLjEtMjMuMWMxMi43LDAsMjMuMSwxMC40LDIzLjEsMjMuMQ0KCQljMCwxMi43LTEwLjMsMjMuMS0yMy4xLDIzLjFDMTg2LjMsMTM4LjIsMTc2LDEyNy44LDE3NiwxMTUuMXogTTIyMi4xLDM4NC45YzAsMTIuNy0xMC4zLDIzLjEtMjMuMSwyMy4xDQoJCWMtMTIuNywwLTIzLjEtMTAuNC0yMy4xLTIzLjFjMC0xMi43LDEwLjMtMjMuMSwyMy4xLTIzLjFDMjExLjgsMzYxLjgsMjIyLjEsMzcyLjIsMjIyLjEsMzg0Ljl6IE0xOTkuMSwyODEuMw0KCQljLTE3LjcsMC0zMi4yLTE0LjQtMzIuMi0zMi4yYzAtMTcuNywxNC40LTMyLjIsMzIuMi0zMi4yYzE3LjcsMCwzMi4yLDE0LjQsMzIuMiwzMi4yQzIzMS4yLDI2Ni45LDIxNi44LDI4MS4zLDE5OS4xLDI4MS4zeg0KCQkgTTMxNC44LDM0MC4zYy0xMi43LDAtMjMuMS0xMC40LTIzLjEtMjMuMWMwLTEyLjcsMTAuMy0yMy4xLDIzLjEtMjMuMXMyMy4xLDEwLjQsMjMuMSwyMy4xQzMzNy45LDMzMCwzMjcuNSwzNDAuMywzMTQuOCwzNDAuM3oiLz4NCjwvZz4NCjwvc3ZnPg0K"
camel.apache.org/provider: "Apache Software Foundation"
camel.apache.org/kamelet.group: "Kafka"
camel.apache.org/kamelet.namespace: "Kafka"
camel.apache.org/keda.type: "kafka"
labels:
camel.apache.org/kamelet.type: "source"
spec:
definition:
title: "Azure Kafka Batch through Eventhubs with Azure Schema Registry Source"
description: |-
      Receive data in batches from Kafka topics on Azure Event Hubs, with values deserialized through Azure Schema Registry, and commit offsets either automatically or manually through KafkaManualCommit.
required:
- topic
- bootstrapServers
- azureRegistryUrl
- password
type: object
properties:
topic:
title: Topic Names
        description: Comma-separated list of Kafka topic names
type: string
x-descriptors:
- urn:keda:metadata:topic
- urn:keda:required
bootstrapServers:
title: Bootstrap Servers
        description: Comma-separated list of Kafka broker URLs
type: string
x-descriptors:
- urn:keda:metadata:bootstrapServers
- urn:keda:required
securityProtocol:
title: Security Protocol
description: Protocol used to communicate with brokers. SASL_PLAINTEXT, PLAINTEXT, SASL_SSL and SSL are supported
type: string
default: SASL_SSL
saslMechanism:
title: SASL Mechanism
description: The Simple Authentication and Security Layer (SASL) Mechanism used.
type: string
default: PLAIN
password:
title: Password
        description: Password to authenticate to Kafka. For Azure Event Hubs, this is the namespace connection string.
type: string
format: password
x-descriptors:
- urn:camel:group:credentials
- urn:keda:authentication:password
- urn:keda:required
autoCommitEnable:
title: Auto Commit Enable
        description: If true, periodically commit to Kafka the offsets of messages already fetched by the consumer
type: boolean
default: true
allowManualCommit:
title: Allow Manual Commit
        description: Whether to allow manual offset commits
type: boolean
default: false
pollOnError:
title: Poll On Error Behavior
        description: What to do if Kafka throws an exception while polling for new messages. One of DISCARD, ERROR_HANDLER, RECONNECT, RETRY, STOP
type: string
default: "ERROR_HANDLER"
autoOffsetReset:
title: Auto Offset Reset
        description: What to do when there is no initial offset. One of latest, earliest, none
type: string
default: "latest"
x-descriptors:
- urn:keda:metadata:offsetResetPolicy
consumerGroup:
title: Consumer Group
description: A string that uniquely identifies the group of consumers to which this source belongs
type: string
example: "my-group-id"
x-descriptors:
- urn:keda:metadata:consumerGroup
- urn:keda:required
deserializeHeaders:
title: Automatically Deserialize Headers
        description: When enabled, the Kamelet source deserializes all message headers to their String representation.
type: boolean
default: true
valueDeserializer:
title: Value Deserializer
description: Deserializer class for value that implements the Deserializer interface.
type: string
default: "com.microsoft.azure.schemaregistry.kafka.avro.KafkaAvroDeserializer"
azureRegistryUrl:
title: Azure Schema Registry URL
        description: The Azure Schema Registry URL
type: string
x-descriptors:
- urn:keda:metadata:bootstrapServers
- urn:keda:required
specificAvroValueType:
title: Specific Avro Value Type
        description: The fully qualified name of the specific Avro type to deserialize values into
type: string
example: "com.example.Order"
batchSize:
        title: Batch Size
        description: The maximum number of records returned in a single call to poll()
        type: integer
default: 500
pollTimeout:
title: Poll Timeout Interval
        description: The timeout, in milliseconds, used when polling the KafkaConsumer
        type: integer
default: 5000
maxPollIntervalMs:
title: Max Poll Interval
        description: The maximum delay, in milliseconds, between invocations of poll() when using consumer group management
        type: integer
topicIsPattern:
title: Topic Is Pattern
        description: Whether the topic is a pattern (regular expression). This can be used to subscribe to a dynamic number of topics matching the pattern.
type: boolean
default: false
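    # A sketch of how the connection-related properties typically map to Azure
    # Event Hubs (the namespace name "my-eventhubs-ns" is an illustrative
    # assumption, not part of this Kamelet):
    #
    #   bootstrapServers: "my-eventhubs-ns.servicebus.windows.net:9093"    # Event Hubs Kafka endpoint
    #   azureRegistryUrl: "https://my-eventhubs-ns.servicebus.windows.net" # Schema Registry endpoint
    #   password: the Event Hubs connection string, paired with the
    #             "$ConnectionString" username in the SASL JAAS configuration below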
dependencies:
- "mvn:org.apache.camel.kamelets:camel-kamelets-utils:4.8.0"
- "camel:kafka"
- "camel:core"
- "camel:kamelet"
- "camel:azure-schema-registry"
- "mvn:com.microsoft.azure:azure-schemaregistry-kafka-avro:1.1.1"
- "mvn:com.azure:azure-data-schemaregistry-apacheavro:1.1.20"
- "mvn:com.azure:azure-identity:1.13.2"
template:
beans:
- name: defaultAzureCredential
type: "#class:org.apache.camel.component.azure.schema.registry.DefaultAzureCredentialWrapper"
- name: kafkaHeaderDeserializer
type: "#class:org.apache.camel.kamelets.utils.serialization.kafka.KafkaHeaderDeserializer"
properties:
enabled: '{{deserializeHeaders}}'
- name: manualCommitFactory
type: "#class:org.apache.camel.component.kafka.consumer.DefaultKafkaManualCommitFactory"
from:
uri: "kafka:{{topic}}"
parameters:
brokers: "{{bootstrapServers}}"
securityProtocol: "{{securityProtocol}}"
saslMechanism: "{{saslMechanism}}"
        saslJaasConfig: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="$ConnectionString" password="{{password}}";'
autoCommitEnable: "{{autoCommitEnable}}"
allowManualCommit: "{{allowManualCommit}}"
pollOnError: "{{pollOnError}}"
autoOffsetReset: "{{autoOffsetReset}}"
groupId: "{{?consumerGroup}}"
valueDeserializer: "{{valueDeserializer}}"
maxPollRecords: "{{batchSize}}"
pollTimeoutMs: "{{pollTimeout}}"
maxPollIntervalMs: "{{?maxPollIntervalMs}}"
batching: true
kafkaManualCommitFactory: "#bean:{{manualCommitFactory}}"
topicIsPattern: "{{topicIsPattern}}"
additionalProperties.schema.registry.url: "{{azureRegistryUrl}}"
additionalProperties.schema.group: avro
additionalProperties.schema.registry.credential: '#bean:defaultAzureCredential'
additionalProperties.specific.avro.value.type: '#valueAs(java.lang.Class):{{specificAvroValueType}}'
additionalProperties.specific.avro.reader: '#valueAs(boolean):true'
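        # The additionalProperties.* entries above are passed straight through to the
        # Kafka consumer configuration, where the Azure KafkaAvroDeserializer reads
        # them; with illustrative values they resolve to roughly:
        #
        #   schema.registry.url        = https://my-eventhubs-ns.servicebus.windows.net
        #   schema.group               = avro
        #   schema.registry.credential = <the defaultAzureCredential bean>
        #   specific.avro.value.type   = com.example.Order (converted to a java.lang.Class by #valueAs)
        #   specific.avro.reader       = true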
steps:
- process:
ref: "{{kafkaHeaderDeserializer}}"
- to: "kamelet:sink"