
config.kafka-producer.yml
# Generic configuration for Kafka producer.
properties:
  # The sidecar does the serialization to byte array for both key and value
  key.serializer: org.apache.kafka.common.serialization.ByteArraySerializer
  value.serializer: org.apache.kafka.common.serialization.ByteArraySerializer
  # This value is a string; if using 1 or 0, you must quote it as '1' or '0'
  acks: ${kafka-producer.acks:all}
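  # For example, assuming the light-4j style values.yml is used to resolve the
  # ${kafka-producer.*:default} placeholders, leader-only acknowledgement would be a
  # quoted override such as (hypothetical value, not the shipped default):
  # kafka-producer.acks: '1'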
  bootstrap.servers: ${kafka-producer.bootstrap.servers:localhost:9092}
  buffer.memory: ${kafka-producer.buffer.memory:33554432}
  retries: ${kafka-producer.retries:3}
  batch.size: ${kafka-producer.batch.size:16384}
  linger.ms: ${kafka-producer.linger.ms:1}
  max.in.flight.requests.per.connection: ${kafka-producer.max.in.flight.requests.per.connection:5}
  enable.idempotence: ${kafka-producer.enable.idempotence:true}
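  # A throughput-oriented tuning sketch (example values only, assuming the same
  # values.yml override mechanism as above):
  # kafka-producer.batch.size: 65536
  # kafka-producer.linger.ms: 10
  # kafka-producer.buffer.memory: 67108864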
  # Transactional producer configuration
  # The TransactionalId to use for transactional delivery.
  # transactional.id: ${kafka-producer.transactional.id:T1000}
  # The maximum amount of time in ms that the transaction coordinator will wait for a transaction status
  # update from the producer before proactively aborting the ongoing transaction. Defaults to 1 minute.
  # transaction.timeout.ms: ${kafka-producer.transaction.timeout.ms:60000}
  # The time in ms that the transaction coordinator will wait without receiving any transaction status
  # updates for the current transaction before expiring its transactional id. Defaults to 7 days.
  # transactional.id.expiration.ms: ${kafka-producer.transactional.id.expiration.ms:604800000}
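  # To enable transactional delivery, uncomment transactional.id above; Kafka requires
  # enable.idempotence=true (already set above) whenever a transactional.id is configured.
  # A hypothetical values.yml override could then look like:
  # kafka-producer.transactional.id: my-service-tx-1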
  # Confluent schema registry url
  schema.registry.url: ${kafka-producer.schema.registry.url:http://localhost:8081}
  # Schema registry identity cache size
  schema.registry.cache: ${kafka-producer.schema.registry.cache:100}
  # Schema registry auto register schema indicator for the streams application. If true, the first request will register the schema automatically.
  schema.registry.auto.register.schemas: ${kafka-producer.schema.registry.auto.register.schemas:true}
  # Schema registry client truststore location; use the following two properties only if the schema registry url is https.
  schema.registry.ssl.truststore.location: ${kafka-producer.schema.registry.ssl.truststore.location:/config/client.truststore}
  # Schema registry client truststore password
  schema.registry.ssl.truststore.password: ${kafka-producer.schema.registry.ssl.truststore.password:password}
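  # Example override for a TLS-enabled registry (hostname and password are hypothetical):
  # kafka-producer.schema.registry.url: https://schema-registry.example.com:8081
  # kafka-producer.schema.registry.ssl.truststore.location: /config/client.truststore
  # kafka-producer.schema.registry.ssl.truststore.password: changeit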
  # security configuration for enterprise deployment
  security.protocol: ${kafka-producer.security.protocol:SASL_SSL}
  sasl.mechanism: ${kafka-producer.sasl.mechanism:PLAIN}
  sasl.jaas.config: "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"${kafka-producer.username:username}\" password=\"${KAFKA_PRODUCER_PASSWORD:password}\";"
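  # If the cluster uses SCRAM instead of PLAIN, the equivalent settings would look like this
  # (illustrative sketch with placeholder credentials, not part of the defaults):
  # sasl.mechanism: SCRAM-SHA-512
  # sasl.jaas.config: "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"user\" password=\"secret\";"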
  ssl.endpoint.identification.algorithm: ${kafka-producer.ssl.endpoint.identification.algorithm:algo-name}
  ssl.truststore.location: ${kafka-producer.ssl.truststore.location:/config/client.truststore}
  ssl.truststore.password: ${kafka-producer.ssl.truststore.password:password}
  client.rack: ${kafka-producer.client.rack:rack-name}
  # basic authentication user:pass for the schema registry
  basic.auth.user.info: ${kafka-producer.username:username}:${KAFKA_PRODUCER_PASSWORD:password}
  basic.auth.credentials.source: ${kafka-producer.basic.auth.credentials.source:USER_INFO}
  # If you have a message that is bigger than 1 MB to produce, increase this value.
  max.request.size: ${kafka-producer.max.request.size:1048576}
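  # Hypothetical override for records up to 5 MB; the broker-side message.max.bytes (or the
  # per-topic max.message.bytes) must also be raised, since increasing the producer limit alone is not enough:
  # kafka-producer.max.request.size: 5242880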
# The default topic for the producer. Only certain producer implementations will use it.
topic: ${kafka-producer.topic:test1}
# Default key format if no schema for the topic key
keyFormat: ${kafka-producer.keyFormat:jsonschema}
# Default value format if no schema for the topic value
valueFormat: ${kafka-producer.valueFormat:jsonschema}
# Indicator if OpenTracing is enabled. Traceability, correlation and metrics should not be in the chain if OpenTracing is used.
injectOpenTracing: ${kafka-producer.injectOpenTracing:false}
# Inject serviceId as callerId into the http header for metrics to collect the caller. The serviceId is from server.yml
injectCallerId: ${kafka-producer.injectCallerId:false}
# Indicator if the audit is enabled.
auditEnabled: ${kafka-producer.auditEnabled:true}
# Audit log destination: topic or logfile. Defaults to logfile.
auditTarget: ${kafka-producer.auditTarget:logfile}
# The audit topic name if the auditTarget is topic
auditTopic: ${kafka-producer.auditTopic:sidecar-audit}
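# A consolidated values.yml sketch that overrides the most commonly changed defaults above,
# assuming the light-4j config module resolves the ${...:default} placeholders from values.yml
# or the environment (all values below are hypothetical examples):
# kafka-producer.bootstrap.servers: broker1:9092,broker2:9092
# kafka-producer.topic: orders
# kafka-producer.username: svc-producer
# kafka-producer.auditTarget: topic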