All downloads are free. The search and download functionality uses the official Maven repository.

com.atlan.model.assets.KafkaTopic Maven / Gradle / Ivy

There is a newer version: 3.0.0
Show newest version
// Generated by delombok at Wed Oct 16 22:16:03 UTC 2024
/* SPDX-License-Identifier: Apache-2.0
   Copyright 2022 Atlan Pte. Ltd. */
package com.atlan.model.assets;

import com.atlan.Atlan;
import com.atlan.AtlanClient;
import com.atlan.exception.AtlanException;
import com.atlan.exception.ErrorCode;
import com.atlan.exception.InvalidRequestException;
import com.atlan.exception.NotFoundException;
import com.atlan.model.enums.AtlanAnnouncementType;
import com.atlan.model.enums.CertificateStatus;
import com.atlan.model.enums.KafkaTopicCleanupPolicy;
import com.atlan.model.enums.KafkaTopicCompressionType;
import com.atlan.model.relations.Reference;
import com.atlan.model.relations.UniqueAttributes;
import com.atlan.model.search.FluentSearch;
import com.atlan.util.StringUtils;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.concurrent.ThreadLocalRandom;
import javax.annotation.processing.Generated;
import lombok.*;

/**
 * Instance of a Kafka Topic in Atlan.
 */
@Generated("com.atlan.generators.ModelGeneratorV2")
public class KafkaTopic extends Asset implements IKafkaTopic, IKafka, IEventStore, ICatalog, IAsset, IReferenceable {
    // NOTE(review): the collection fields below are raw types (SortedSet, no element type);
    // the generic parameters appear to have been stripped during extraction — confirm the
    // element types against the published Atlan Java SDK before relying on them.
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(KafkaTopic.class);
    private static final long serialVersionUID = 2L;
    // Fixed Atlan type name for all KafkaTopic assets; used throughout the static helpers below.
    public static final String TYPE_NAME = "KafkaTopic";
    /**
     * Fixed typeName for KafkaTopics.
     */
    String typeName;
    /**
     * Tasks to which this asset provides input.
     */
    @Attribute
    SortedSet inputToAirflowTasks;
    /**
     * Processes to which this asset provides input.
     */
    @Attribute
    SortedSet inputToProcesses;
    /**
     * TBC — field name suggests Spark jobs to which this asset provides input; semantics not confirmed upstream.
     */
    @Attribute
    SortedSet inputToSparkJobs;
    /**
     * Consumer groups subscribed to this topic.
     */
    @Attribute
    SortedSet kafkaConsumerGroups;
    /**
     * Cleanup policy for this topic.
     */
    @Attribute
    KafkaTopicCleanupPolicy kafkaTopicCleanupPolicy;
    /**
     * Type of compression used for this topic.
     */
    @Attribute
    KafkaTopicCompressionType kafkaTopicCompressionType;
    /**
     * Whether this topic is an internal topic (true) or not (false).
     */
    @Attribute
    Boolean kafkaTopicIsInternal;
    /**
     * Number of partitions for this topic.
     */
    @Attribute
    Long kafkaTopicPartitionsCount;
    /**
     * Number of (unexpired) messages in this topic.
     */
    @Attribute
    Long kafkaTopicRecordCount;
    /**
     * Replication factor for this topic.
     */
    @Attribute
    Long kafkaTopicReplicationFactor;
    /**
     * Amount of time messages will be retained in this topic, in milliseconds.
     */
    @Attribute
    Long kafkaTopicRetentionTimeInMs;
    /**
     * Segment size for this topic.
     */
    @Attribute
    Long kafkaTopicSegmentBytes;
    /**
     * Size of this topic, in bytes.
     */
    @Attribute
    Long kafkaTopicSizeInBytes;
    /**
     * Entities implemented by this asset.
     */
    @Attribute
    @JsonProperty("modelEntityImplemented")
    SortedSet modelImplementedEntities;
    /**
     * Tasks from which this asset is output.
     */
    @Attribute
    SortedSet outputFromAirflowTasks;
    /**
     * Processes from which this asset is produced as output.
     */
    @Attribute
    SortedSet outputFromProcesses;
    /**
     * TBC — field name suggests Spark jobs from which this asset is produced as output; semantics not confirmed upstream.
     */
    @Attribute
    SortedSet outputFromSparkJobs;

    /**
     * Reduce this (potentially fully-populated) KafkaTopic to the minimal reference object
     * needed to define a relationship to it: by GUID if available, otherwise by qualifiedName
     * (either directly or via unique attributes).
     *
     * @return the minimal object necessary to relate to the KafkaTopic
     * @throws InvalidRequestException if neither a GUID nor a qualifiedName is present on this object
     */
    @Override
    public KafkaTopic trimToReference() throws InvalidRequestException {
        String guid = this.getGuid();
        if (guid != null && !guid.isEmpty()) {
            return refByGuid(guid);
        }
        String qualifiedName = this.getQualifiedName();
        if (qualifiedName != null && !qualifiedName.isEmpty()) {
            return refByQualifiedName(qualifiedName);
        }
        UniqueAttributes unique = this.getUniqueAttributes();
        if (unique != null) {
            String uniqueQualifiedName = unique.getQualifiedName();
            if (uniqueQualifiedName != null && !uniqueQualifiedName.isEmpty()) {
                return refByQualifiedName(uniqueQualifiedName);
            }
        }
        throw new InvalidRequestException(ErrorCode.MISSING_REQUIRED_RELATIONSHIP_PARAM, TYPE_NAME, "guid, qualifiedName");
    }

    /**
     * Begin a fluent search over all KafkaTopic assets, using the default Atlan client.
     * Further conditions can be chained before retrieval so that everything is pushed down
     * into a single query. Archived (soft-deleted) KafkaTopics are excluded.
     *
     * @return a fluent search that includes all (active) KafkaTopic assets
     */
    public static FluentSearch.FluentSearchBuilder select() {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return select(defaultClient);
    }

    /**
     * Begin a fluent search over all KafkaTopic assets on the given tenant.
     * Further conditions can be chained before retrieval so that everything is pushed down
     * into a single query. Archived (soft-deleted) KafkaTopics are excluded.
     *
     * @param client connectivity to the Atlan tenant from which to retrieve the assets
     * @return a fluent search that includes all (active) KafkaTopic assets
     */
    public static FluentSearch.FluentSearchBuilder select(AtlanClient client) {
        // Archived assets are excluded unless explicitly requested via the boolean overload.
        boolean includeArchived = false;
        return select(client, includeArchived);
    }

    /**
     * Begin a fluent search over all KafkaTopic assets, using the default Atlan client,
     * optionally including archived (soft-deleted) topics.
     *
     * @param includeArchived when true, archived (soft-deleted) KafkaTopics will be included
     * @return a fluent search that includes all KafkaTopic assets
     */
    public static FluentSearch.FluentSearchBuilder select(boolean includeArchived) {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return select(defaultClient, includeArchived);
    }

    /**
     * Begin a fluent search over all KafkaTopic assets on the given tenant, optionally
     * including archived (soft-deleted) topics. This is the overload all others delegate to.
     *
     * @param client connectivity to the Atlan tenant from which to retrieve the assets
     * @param includeArchived when true, archived (soft-deleted) KafkaTopics will be included
     * @return a fluent search that includes all KafkaTopic assets
     */
    public static FluentSearch.FluentSearchBuilder select(AtlanClient client, boolean includeArchived) {
        // Constrain the search to this asset type; archived assets are filtered out unless requested.
        FluentSearch.FluentSearchBuilder builder = FluentSearch.builder(client).where(Asset.TYPE_NAME.eq(TYPE_NAME));
        if (includeArchived) {
            return builder;
        }
        builder.active();
        return builder;
    }

    /**
     * Create a reference to a KafkaTopic by its GUID, using REPLACE semantics
     * (any existing relationship will be replaced by this one).
     *
     * @param guid the GUID of the KafkaTopic to reference
     * @return reference to a KafkaTopic that can be used for defining a relationship to a KafkaTopic
     */
    public static KafkaTopic refByGuid(String guid) {
        Reference.SaveSemantic semantic = Reference.SaveSemantic.REPLACE;
        return refByGuid(guid, semantic);
    }

    /**
     * Create a reference to a KafkaTopic by its GUID, with explicit control over how the
     * relationship is saved (replaced, appended, or removed).
     *
     * @param guid the GUID of the KafkaTopic to reference
     * @param semantic how to save this relationship (replace all with this, append it, or remove it)
     * @return reference to a KafkaTopic that can be used for defining a relationship to a KafkaTopic
     */
    public static KafkaTopic refByGuid(String guid, Reference.SaveSemantic semantic) {
        KafkaTopicBuilder reference = KafkaTopic._internal().guid(guid).semantic(semantic);
        return reference.build();
    }

    /**
     * Create a reference to a KafkaTopic by its qualifiedName, using REPLACE semantics
     * (any existing relationship will be replaced by this one).
     *
     * @param qualifiedName the qualifiedName of the KafkaTopic to reference
     * @return reference to a KafkaTopic that can be used for defining a relationship to a KafkaTopic
     */
    public static KafkaTopic refByQualifiedName(String qualifiedName) {
        Reference.SaveSemantic semantic = Reference.SaveSemantic.REPLACE;
        return refByQualifiedName(qualifiedName, semantic);
    }

    /**
     * Create a reference to a KafkaTopic by its qualifiedName, with explicit control over
     * how the relationship is saved (replaced, appended, or removed).
     *
     * @param qualifiedName the qualifiedName of the KafkaTopic to reference
     * @param semantic how to save this relationship (replace all with this, append it, or remove it)
     * @return reference to a KafkaTopic that can be used for defining a relationship to a KafkaTopic
     */
    public static KafkaTopic refByQualifiedName(String qualifiedName, Reference.SaveSemantic semantic) {
        UniqueAttributes unique = UniqueAttributes.builder().qualifiedName(qualifiedName).build();
        return KafkaTopic._internal().uniqueAttributes(unique).semantic(semantic).build();
    }

    /**
     * Retrieve a KafkaTopic by one of its identifiers (GUID or full qualifiedName),
     * complete with all of its relationships, using the default Atlan client.
     *
     * @param id of the KafkaTopic to retrieve, either its GUID or its full qualifiedName
     * @return the requested full KafkaTopic, complete with all of its relationships
     * @throws AtlanException on any error during the API invocation, such as the {@link NotFoundException} if the KafkaTopic does not exist or the provided GUID is not a KafkaTopic
     */
    @JsonIgnore
    public static KafkaTopic get(String id) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return get(defaultClient, id);
    }

    /**
     * Retrieve a KafkaTopic by one of its identifiers (GUID or full qualifiedName),
     * complete with all of its relationships.
     *
     * @param client connectivity to the Atlan tenant from which to retrieve the asset
     * @param id of the KafkaTopic to retrieve, either its GUID or its full qualifiedName
     * @return the requested full KafkaTopic, complete with all of its relationships
     * @throws AtlanException on any error during the API invocation, such as the {@link NotFoundException} if the KafkaTopic does not exist or the provided GUID is not a KafkaTopic
     */
    @JsonIgnore
    public static KafkaTopic get(AtlanClient client, String id) throws AtlanException {
        // Relationships are included by default.
        boolean includeRelationships = true;
        return get(client, id, includeRelationships);
    }

    /**
     * Retrieve a KafkaTopic by one of its identifiers, optionally complete with all of its
     * relationships. A GUID-shaped id is looked up by GUID; anything else is treated as a
     * qualifiedName.
     *
     * @param client connectivity to the Atlan tenant from which to retrieve the asset
     * @param id of the KafkaTopic to retrieve, either its GUID or its full qualifiedName
     * @param includeRelationships if true, all of the asset's relationships will also be retrieved; if false, no relationships will be retrieved
     * @return the requested full KafkaTopic, optionally complete with all of its relationships
     * @throws AtlanException on any error during the API invocation, such as the {@link NotFoundException} if the KafkaTopic does not exist or the provided GUID is not a KafkaTopic
     */
    @JsonIgnore
    public static KafkaTopic get(AtlanClient client, String id, boolean includeRelationships) throws AtlanException {
        if (id == null) {
            throw new NotFoundException(ErrorCode.ASSET_NOT_FOUND_BY_GUID, "(null)");
        }
        if (StringUtils.isUUID(id)) {
            // GUID lookup: any asset type could come back, so verify it is actually a KafkaTopic.
            Asset candidate = Asset.get(client, id, includeRelationships);
            if (candidate == null) {
                throw new NotFoundException(ErrorCode.ASSET_NOT_FOUND_BY_GUID, id);
            }
            if (!(candidate instanceof KafkaTopic)) {
                throw new NotFoundException(ErrorCode.ASSET_NOT_TYPE_REQUESTED, id, TYPE_NAME);
            }
            return (KafkaTopic) candidate;
        }
        // qualifiedName lookup, constrained to this type.
        Asset candidate = Asset.get(client, TYPE_NAME, id, includeRelationships);
        if (!(candidate instanceof KafkaTopic)) {
            throw new NotFoundException(ErrorCode.ASSET_NOT_FOUND_BY_QN, id, TYPE_NAME);
        }
        return (KafkaTopic) candidate;
    }

    /**
     * Restore an archived (soft-deleted) KafkaTopic to active, using the default Atlan client.
     *
     * @param qualifiedName for the KafkaTopic
     * @return true if the KafkaTopic is now active, and false otherwise
     * @throws AtlanException on any API problems
     */
    public static boolean restore(String qualifiedName) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return restore(defaultClient, qualifiedName);
    }

    /**
     * Restore an archived (soft-deleted) KafkaTopic to active.
     *
     * @param client connectivity to the Atlan tenant on which to restore the asset
     * @param qualifiedName for the KafkaTopic
     * @return true if the KafkaTopic is now active, and false otherwise
     * @throws AtlanException on any API problems
     */
    public static boolean restore(AtlanClient client, String qualifiedName) throws AtlanException {
        // Delegate to the generic asset-level restore, fixing the type name to KafkaTopic.
        boolean nowActive = Asset.restore(client, TYPE_NAME, qualifiedName);
        return nowActive;
    }

    /**
     * Build the minimal object necessary to create a KafkaTopic.
     *
     * @param name of the KafkaTopic
     * @param connectionQualifiedName unique name of the connection through which the KafkaTopic is accessible
     * @return the minimal object necessary to create the KafkaTopic, as a builder
     */
    public static KafkaTopicBuilder creator(String name, String connectionQualifiedName) {
        // Negative random GUID marks this as a placeholder until the server assigns a real one.
        String placeholderGuid = "-" + ThreadLocalRandom.current().nextLong(0, Long.MAX_VALUE - 1);
        return KafkaTopic._internal()
                .guid(placeholderGuid)
                .qualifiedName(generateQualifiedName(name, connectionQualifiedName))
                .name(name)
                .connectionQualifiedName(connectionQualifiedName)
                .connectorType(Connection.getConnectorTypeFromQualifiedName(connectionQualifiedName));
    }

    /**
     * Generate a unique qualifiedName for a KafkaTopic, of the form
     * {@code {connectionQualifiedName}/topic/{name}}.
     *
     * @param name of the KafkaTopic
     * @param connectionQualifiedName unique name of the connection through which the KafkaTopic is accessible
     * @return a unique name for the KafkaTopic
     */
    public static String generateQualifiedName(String name, String connectionQualifiedName) {
        return new StringBuilder(connectionQualifiedName).append("/topic/").append(name).toString();
    }

    /**
     * Build the minimal object necessary to update a KafkaTopic.
     *
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the minimal request necessary to update the KafkaTopic, as a builder
     */
    public static KafkaTopicBuilder updater(String qualifiedName, String name) {
        // Negative random GUID marks this as a placeholder for the update request.
        String placeholderGuid = "-" + ThreadLocalRandom.current().nextLong(0, Long.MAX_VALUE - 1);
        return KafkaTopic._internal()
                .guid(placeholderGuid)
                .qualifiedName(qualifiedName)
                .name(name);
    }

    /**
     * Build the minimal object necessary to apply an update to a KafkaTopic, from a potentially
     * more-complete KafkaTopic object.
     *
     * @return the minimal object necessary to update the KafkaTopic, as a builder
     * @throws InvalidRequestException if any of the minimal set of required properties for KafkaTopic are not found in the initial object
     */
    @Override
    public KafkaTopicBuilder trimToRequired() throws InvalidRequestException {
        // Parameterize the map (was a raw Map): both values come from String getters.
        Map<String, String> map = new HashMap<>();
        map.put("qualifiedName", this.getQualifiedName());
        map.put("name", this.getName());
        // Throws InvalidRequestException if any required value above is missing.
        validateRequired(TYPE_NAME, map);
        return updater(this.getQualifiedName(), this.getName());
    }

    /**
     * Remove the system description from a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeDescription(String qualifiedName, String name) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return removeDescription(defaultClient, qualifiedName, name);
    }

    /**
     * Remove the system description from a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant on which to remove the asset's description
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeDescription(AtlanClient client, String qualifiedName, String name) throws AtlanException {
        Asset updated = Asset.removeDescription(client, updater(qualifiedName, name));
        return (KafkaTopic) updated;
    }

    /**
     * Remove the user's description from a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeUserDescription(String qualifiedName, String name) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return removeUserDescription(defaultClient, qualifiedName, name);
    }

    /**
     * Remove the user's description from a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant on which to remove the asset's description
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeUserDescription(AtlanClient client, String qualifiedName, String name) throws AtlanException {
        Asset updated = Asset.removeUserDescription(client, updater(qualifiedName, name));
        return (KafkaTopic) updated;
    }

    /**
     * Remove the owners from a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeOwners(String qualifiedName, String name) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return removeOwners(defaultClient, qualifiedName, name);
    }

    /**
     * Remove the owners from a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant from which to remove the KafkaTopic's owners
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeOwners(AtlanClient client, String qualifiedName, String name) throws AtlanException {
        Asset updated = Asset.removeOwners(client, updater(qualifiedName, name));
        return (KafkaTopic) updated;
    }

    /**
     * Update the certificate on a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param certificate to use
     * @param message (optional) message, or null if no message
     * @return the updated KafkaTopic, or null if the update failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic updateCertificate(String qualifiedName, CertificateStatus certificate, String message) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return updateCertificate(defaultClient, qualifiedName, certificate, message);
    }

    /**
     * Update the certificate on a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant on which to update the KafkaTopic's certificate
     * @param qualifiedName of the KafkaTopic
     * @param certificate to use
     * @param message (optional) message, or null if no message
     * @return the updated KafkaTopic, or null if the update failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic updateCertificate(AtlanClient client, String qualifiedName, CertificateStatus certificate, String message) throws AtlanException {
        Asset updated = Asset.updateCertificate(client, _internal(), TYPE_NAME, qualifiedName, certificate, message);
        return (KafkaTopic) updated;
    }

    /**
     * Remove the certificate from a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeCertificate(String qualifiedName, String name) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return removeCertificate(defaultClient, qualifiedName, name);
    }

    /**
     * Remove the certificate from a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant from which to remove the KafkaTopic's certificate
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeCertificate(AtlanClient client, String qualifiedName, String name) throws AtlanException {
        Asset updated = Asset.removeCertificate(client, updater(qualifiedName, name));
        return (KafkaTopic) updated;
    }

    /**
     * Update the announcement on a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param type type of announcement to set
     * @param title (optional) title of the announcement to set (or null for no title)
     * @param message (optional) message of the announcement to set (or null for no message)
     * @return the result of the update, or null if the update failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic updateAnnouncement(String qualifiedName, AtlanAnnouncementType type, String title, String message) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return updateAnnouncement(defaultClient, qualifiedName, type, title, message);
    }

    /**
     * Update the announcement on a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant on which to update the KafkaTopic's announcement
     * @param qualifiedName of the KafkaTopic
     * @param type type of announcement to set
     * @param title (optional) title of the announcement to set (or null for no title)
     * @param message (optional) message of the announcement to set (or null for no message)
     * @return the result of the update, or null if the update failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic updateAnnouncement(AtlanClient client, String qualifiedName, AtlanAnnouncementType type, String title, String message) throws AtlanException {
        Asset updated = Asset.updateAnnouncement(client, _internal(), TYPE_NAME, qualifiedName, type, title, message);
        return (KafkaTopic) updated;
    }

    /**
     * Remove the announcement from a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeAnnouncement(String qualifiedName, String name) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return removeAnnouncement(defaultClient, qualifiedName, name);
    }

    /**
     * Remove the announcement from a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant from which to remove the KafkaTopic's announcement
     * @param qualifiedName of the KafkaTopic
     * @param name of the KafkaTopic
     * @return the updated KafkaTopic, or null if the removal failed
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeAnnouncement(AtlanClient client, String qualifiedName, String name) throws AtlanException {
        Asset updated = Asset.removeAnnouncement(client, updater(qualifiedName, name));
        return (KafkaTopic) updated;
    }

    /**
     * Replace the terms linked to the KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName for the KafkaTopic
     * @param name human-readable name of the KafkaTopic
     * @param terms the list of terms to replace on the KafkaTopic, or null to remove all terms from the KafkaTopic
     * @return the KafkaTopic that was updated (note that it will NOT contain details of the replaced terms)
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic replaceTerms(String qualifiedName, String name, List terms) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return replaceTerms(defaultClient, qualifiedName, name, terms);
    }

    /**
     * Replace the terms linked to the KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant on which to replace the KafkaTopic's assigned terms
     * @param qualifiedName for the KafkaTopic
     * @param name human-readable name of the KafkaTopic
     * @param terms the list of terms to replace on the KafkaTopic, or null to remove all terms from the KafkaTopic
     * @return the KafkaTopic that was updated (note that it will NOT contain details of the replaced terms)
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic replaceTerms(AtlanClient client, String qualifiedName, String name, List terms) throws AtlanException {
        Asset updated = Asset.replaceTerms(client, updater(qualifiedName, name), terms);
        return (KafkaTopic) updated;
    }

    /**
     * Link additional terms to the KafkaTopic (without replacing its existing terms), using
     * the default Atlan client. Note: this requires two API calls — one to read the existing
     * terms and a second to append the new ones.
     *
     * @param qualifiedName for the KafkaTopic
     * @param terms the list of terms to append to the KafkaTopic
     * @return the KafkaTopic that was updated (note that it will NOT contain details of the appended terms)
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic appendTerms(String qualifiedName, List terms) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return appendTerms(defaultClient, qualifiedName, terms);
    }

    /**
     * Link additional terms to the KafkaTopic, without replacing its existing terms.
     * Note: this requires two API calls — one to read the existing terms and a second to
     * append the new ones.
     *
     * @param client connectivity to the Atlan tenant on which to append terms to the KafkaTopic
     * @param qualifiedName for the KafkaTopic
     * @param terms the list of terms to append to the KafkaTopic
     * @return the KafkaTopic that was updated (note that it will NOT contain details of the appended terms)
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic appendTerms(AtlanClient client, String qualifiedName, List terms) throws AtlanException {
        Asset updated = Asset.appendTerms(client, TYPE_NAME, qualifiedName, terms);
        return (KafkaTopic) updated;
    }

    /**
     * Remove terms from a KafkaTopic (without replacing all of its existing terms), using
     * the default Atlan client. Note: this requires two API calls — one to read the existing
     * terms and a second to remove the provided ones.
     *
     * @param qualifiedName for the KafkaTopic
     * @param terms the list of terms to remove from the KafkaTopic, which must be referenced by GUID
     * @return the KafkaTopic that was updated (note that it will NOT contain details of the resulting terms)
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeTerms(String qualifiedName, List terms) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return removeTerms(defaultClient, qualifiedName, terms);
    }

    /**
     * Remove terms from a KafkaTopic, without replacing all of its existing terms.
     * Note: this requires two API calls — one to read the existing terms and a second to
     * remove the provided ones.
     *
     * @param client connectivity to the Atlan tenant from which to remove terms from the KafkaTopic
     * @param qualifiedName for the KafkaTopic
     * @param terms the list of terms to remove from the KafkaTopic, which must be referenced by GUID
     * @return the KafkaTopic that was updated (note that it will NOT contain details of the resulting terms)
     * @throws AtlanException on any API problems
     */
    public static KafkaTopic removeTerms(AtlanClient client, String qualifiedName, List terms) throws AtlanException {
        Asset updated = Asset.removeTerms(client, TYPE_NAME, qualifiedName, terms);
        return (KafkaTopic) updated;
    }

    /**
     * Add Atlan tags to a KafkaTopic (without replacing its existing Atlan tags), using the
     * default Atlan client. Note: this requires two API calls — one to read the existing
     * tags and a second to append the new ones.
     *
     * @param qualifiedName of the KafkaTopic
     * @param atlanTagNames human-readable names of the Atlan tags to add
     * @throws AtlanException on any API problems
     * @return the updated KafkaTopic
     */
    public static KafkaTopic appendAtlanTags(String qualifiedName, List atlanTagNames) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return appendAtlanTags(defaultClient, qualifiedName, atlanTagNames);
    }

    /**
     * Add Atlan tags to a KafkaTopic, without replacing its existing Atlan tags.
     * Note: this requires two API calls — one to read the existing tags and a second to
     * append the new ones.
     *
     * @param client connectivity to the Atlan tenant on which to append Atlan tags to the KafkaTopic
     * @param qualifiedName of the KafkaTopic
     * @param atlanTagNames human-readable names of the Atlan tags to add
     * @throws AtlanException on any API problems
     * @return the updated KafkaTopic
     */
    public static KafkaTopic appendAtlanTags(AtlanClient client, String qualifiedName, List atlanTagNames) throws AtlanException {
        Asset updated = Asset.appendAtlanTags(client, TYPE_NAME, qualifiedName, atlanTagNames);
        return (KafkaTopic) updated;
    }

    /**
     * Add Atlan tags to a KafkaTopic with explicit propagation settings (without replacing
     * its existing Atlan tags), using the default Atlan client. Note: this requires two API
     * calls — one to read the existing tags and a second to append the new ones.
     *
     * @param qualifiedName of the KafkaTopic
     * @param atlanTagNames human-readable names of the Atlan tags to add
     * @param propagate whether to propagate the Atlan tag (true) or not (false)
     * @param removePropagationsOnDelete whether to remove the propagated Atlan tags when the Atlan tag is removed from this asset (true) or not (false)
     * @param restrictLineagePropagation whether to avoid propagating through lineage (true) or do propagate through lineage (false)
     * @throws AtlanException on any API problems
     * @return the updated KafkaTopic
     */
    public static KafkaTopic appendAtlanTags(String qualifiedName, List atlanTagNames, boolean propagate, boolean removePropagationsOnDelete, boolean restrictLineagePropagation) throws AtlanException {
        AtlanClient defaultClient = Atlan.getDefaultClient();
        return appendAtlanTags(defaultClient, qualifiedName, atlanTagNames, propagate, removePropagationsOnDelete, restrictLineagePropagation);
    }

    /**
     * Add Atlan tags to a KafkaTopic, without replacing existing Atlan tags linked to the KafkaTopic.
     * Note: this operation must make two API calls — one to retrieve the KafkaTopic's existing Atlan tags,
     * and a second to append the new Atlan tags.
     *
     * @param client connectivity to the Atlan tenant on which to append Atlan tags to the KafkaTopic
     * @param qualifiedName of the KafkaTopic
     * @param atlanTagNames human-readable names of the Atlan tags to add
     * @param propagate whether to propagate the Atlan tag (true) or not (false)
     * @param removePropagationsOnDelete whether to remove the propagated Atlan tags when the Atlan tag is removed from this asset (true) or not (false)
     * @param restrictLineagePropagation whether to avoid propagating through lineage (true) or do propagate through lineage (false)
     * @throws AtlanException on any API problems
     * @return the updated KafkaTopic
     */
    public static KafkaTopic appendAtlanTags(AtlanClient client, String qualifiedName, List<String> atlanTagNames, boolean propagate, boolean removePropagationsOnDelete, boolean restrictLineagePropagation) throws AtlanException {
        // Type-scoped delegation: TYPE_NAME restricts the lookup to KafkaTopic assets.
        return (KafkaTopic) Asset.appendAtlanTags(client, TYPE_NAME, qualifiedName, atlanTagNames, propagate, removePropagationsOnDelete, restrictLineagePropagation);
    }

    /**
     * Remove an Atlan tag from a KafkaTopic, using the default Atlan client.
     *
     * @param qualifiedName of the KafkaTopic
     * @param atlanTagName human-readable name of the Atlan tag to remove
     * @throws AtlanException on any API problems, or if the Atlan tag does not exist on the KafkaTopic
     */
    public static void removeAtlanTag(String qualifiedName, String atlanTagName) throws AtlanException {
        // Delegate directly to the generic Asset implementation against the default tenant,
        // scoping the operation to the KafkaTopic type.
        Asset.removeAtlanTag(Atlan.getDefaultClient(), TYPE_NAME, qualifiedName, atlanTagName);
    }

    /**
     * Remove an Atlan tag from a KafkaTopic.
     *
     * @param client connectivity to the Atlan tenant from which to remove an Atlan tag from a KafkaTopic
     * @param qualifiedName of the KafkaTopic
     * @param atlanTagName human-readable name of the Atlan tag to remove
     * @throws AtlanException on any API problems, or if the Atlan tag does not exist on the KafkaTopic
     */
    public static void removeAtlanTag(AtlanClient client, String qualifiedName, String atlanTagName) throws AtlanException {
        // Type-scoped delegation: TYPE_NAME restricts the removal to KafkaTopic assets.
        Asset.removeAtlanTag(client, TYPE_NAME, qualifiedName, atlanTagName);
    }

    /**
     * Supplies the builder's default {@code typeName} ("KafkaTopic") when no explicit value was set.
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    private static String $default$typeName() {
        return TYPE_NAME;
    }


    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public static abstract class KafkaTopicBuilder> extends Asset.AssetBuilder {
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private boolean typeName$set;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private String typeName$value;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList inputToAirflowTasks;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList inputToProcesses;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList inputToSparkJobs;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList kafkaConsumerGroups;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private KafkaTopicCleanupPolicy kafkaTopicCleanupPolicy;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private KafkaTopicCompressionType kafkaTopicCompressionType;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private Boolean kafkaTopicIsInternal;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private Long kafkaTopicPartitionsCount;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private Long kafkaTopicRecordCount;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private Long kafkaTopicReplicationFactor;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private Long kafkaTopicRetentionTimeInMs;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private Long kafkaTopicSegmentBytes;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private Long kafkaTopicSizeInBytes;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList modelImplementedEntities;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList outputFromAirflowTasks;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList outputFromProcesses;
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private java.util.ArrayList outputFromSparkJobs;

        @java.lang.Override
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        protected B $fillValuesFrom(final C instance) {
            super.$fillValuesFrom(instance);
            KafkaTopic.KafkaTopicBuilder.$fillValuesFromInstanceIntoBuilder(instance, this);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private static void $fillValuesFromInstanceIntoBuilder(final KafkaTopic instance, final KafkaTopic.KafkaTopicBuilder b) {
            b.typeName(instance.typeName);
            b.inputToAirflowTasks(instance.inputToAirflowTasks == null ? java.util.Collections.emptySortedSet() : instance.inputToAirflowTasks);
            b.inputToProcesses(instance.inputToProcesses == null ? java.util.Collections.emptySortedSet() : instance.inputToProcesses);
            b.inputToSparkJobs(instance.inputToSparkJobs == null ? java.util.Collections.emptySortedSet() : instance.inputToSparkJobs);
            b.kafkaConsumerGroups(instance.kafkaConsumerGroups == null ? java.util.Collections.emptySortedSet() : instance.kafkaConsumerGroups);
            b.kafkaTopicCleanupPolicy(instance.kafkaTopicCleanupPolicy);
            b.kafkaTopicCompressionType(instance.kafkaTopicCompressionType);
            b.kafkaTopicIsInternal(instance.kafkaTopicIsInternal);
            b.kafkaTopicPartitionsCount(instance.kafkaTopicPartitionsCount);
            b.kafkaTopicRecordCount(instance.kafkaTopicRecordCount);
            b.kafkaTopicReplicationFactor(instance.kafkaTopicReplicationFactor);
            b.kafkaTopicRetentionTimeInMs(instance.kafkaTopicRetentionTimeInMs);
            b.kafkaTopicSegmentBytes(instance.kafkaTopicSegmentBytes);
            b.kafkaTopicSizeInBytes(instance.kafkaTopicSizeInBytes);
            b.modelImplementedEntities(instance.modelImplementedEntities == null ? java.util.Collections.emptySortedSet() : instance.modelImplementedEntities);
            b.outputFromAirflowTasks(instance.outputFromAirflowTasks == null ? java.util.Collections.emptySortedSet() : instance.outputFromAirflowTasks);
            b.outputFromProcesses(instance.outputFromProcesses == null ? java.util.Collections.emptySortedSet() : instance.outputFromProcesses);
            b.outputFromSparkJobs(instance.outputFromSparkJobs == null ? java.util.Collections.emptySortedSet() : instance.outputFromSparkJobs);
        }

        /**
         * Fixed typeName for KafkaTopics.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B typeName(final String typeName) {
            this.typeName$value = typeName;
            typeName$set = true;
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B inputToAirflowTask(final IAirflowTask inputToAirflowTask) {
            if (this.inputToAirflowTasks == null) this.inputToAirflowTasks = new java.util.ArrayList();
            this.inputToAirflowTasks.add(inputToAirflowTask);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B inputToAirflowTasks(final java.util.Collection inputToAirflowTasks) {
            if (inputToAirflowTasks == null) {
                throw new java.lang.NullPointerException("inputToAirflowTasks cannot be null");
            }
            if (this.inputToAirflowTasks == null) this.inputToAirflowTasks = new java.util.ArrayList();
            this.inputToAirflowTasks.addAll(inputToAirflowTasks);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearInputToAirflowTasks() {
            if (this.inputToAirflowTasks != null) this.inputToAirflowTasks.clear();
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B inputToProcess(final ILineageProcess inputToProcess) {
            if (this.inputToProcesses == null) this.inputToProcesses = new java.util.ArrayList();
            this.inputToProcesses.add(inputToProcess);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B inputToProcesses(final java.util.Collection inputToProcesses) {
            if (inputToProcesses == null) {
                throw new java.lang.NullPointerException("inputToProcesses cannot be null");
            }
            if (this.inputToProcesses == null) this.inputToProcesses = new java.util.ArrayList();
            this.inputToProcesses.addAll(inputToProcesses);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearInputToProcesses() {
            if (this.inputToProcesses != null) this.inputToProcesses.clear();
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B inputToSparkJob(final ISparkJob inputToSparkJob) {
            if (this.inputToSparkJobs == null) this.inputToSparkJobs = new java.util.ArrayList();
            this.inputToSparkJobs.add(inputToSparkJob);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B inputToSparkJobs(final java.util.Collection inputToSparkJobs) {
            if (inputToSparkJobs == null) {
                throw new java.lang.NullPointerException("inputToSparkJobs cannot be null");
            }
            if (this.inputToSparkJobs == null) this.inputToSparkJobs = new java.util.ArrayList();
            this.inputToSparkJobs.addAll(inputToSparkJobs);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearInputToSparkJobs() {
            if (this.inputToSparkJobs != null) this.inputToSparkJobs.clear();
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaConsumerGroup(final IKafkaConsumerGroup kafkaConsumerGroup) {
            if (this.kafkaConsumerGroups == null) this.kafkaConsumerGroups = new java.util.ArrayList();
            this.kafkaConsumerGroups.add(kafkaConsumerGroup);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaConsumerGroups(final java.util.Collection kafkaConsumerGroups) {
            if (kafkaConsumerGroups == null) {
                throw new java.lang.NullPointerException("kafkaConsumerGroups cannot be null");
            }
            if (this.kafkaConsumerGroups == null) this.kafkaConsumerGroups = new java.util.ArrayList();
            this.kafkaConsumerGroups.addAll(kafkaConsumerGroups);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearKafkaConsumerGroups() {
            if (this.kafkaConsumerGroups != null) this.kafkaConsumerGroups.clear();
            return self();
        }

        /**
         * Cleanup policy for this topic.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicCleanupPolicy(final KafkaTopicCleanupPolicy kafkaTopicCleanupPolicy) {
            this.kafkaTopicCleanupPolicy = kafkaTopicCleanupPolicy;
            return self();
        }

        /**
         * Type of compression used for this topic.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicCompressionType(final KafkaTopicCompressionType kafkaTopicCompressionType) {
            this.kafkaTopicCompressionType = kafkaTopicCompressionType;
            return self();
        }

        /**
         * Whether this topic is an internal topic (true) or not (false).
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicIsInternal(final Boolean kafkaTopicIsInternal) {
            this.kafkaTopicIsInternal = kafkaTopicIsInternal;
            return self();
        }

        /**
         * Number of partitions for this topic.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicPartitionsCount(final Long kafkaTopicPartitionsCount) {
            this.kafkaTopicPartitionsCount = kafkaTopicPartitionsCount;
            return self();
        }

        /**
         * Number of (unexpired) messages in this topic.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicRecordCount(final Long kafkaTopicRecordCount) {
            this.kafkaTopicRecordCount = kafkaTopicRecordCount;
            return self();
        }

        /**
         * Replication factor for this topic.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicReplicationFactor(final Long kafkaTopicReplicationFactor) {
            this.kafkaTopicReplicationFactor = kafkaTopicReplicationFactor;
            return self();
        }

        /**
         * Amount of time messages will be retained in this topic, in milliseconds.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicRetentionTimeInMs(final Long kafkaTopicRetentionTimeInMs) {
            this.kafkaTopicRetentionTimeInMs = kafkaTopicRetentionTimeInMs;
            return self();
        }

        /**
         * Segment size for this topic.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicSegmentBytes(final Long kafkaTopicSegmentBytes) {
            this.kafkaTopicSegmentBytes = kafkaTopicSegmentBytes;
            return self();
        }

        /**
         * Size of this topic, in bytes.
         * @return {@code this}.
         */
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B kafkaTopicSizeInBytes(final Long kafkaTopicSizeInBytes) {
            this.kafkaTopicSizeInBytes = kafkaTopicSizeInBytes;
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B modelImplementedEntity(final IModelEntity modelImplementedEntity) {
            if (this.modelImplementedEntities == null) this.modelImplementedEntities = new java.util.ArrayList();
            this.modelImplementedEntities.add(modelImplementedEntity);
            return self();
        }

        @JsonProperty("modelEntityImplemented")
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B modelImplementedEntities(final java.util.Collection modelImplementedEntities) {
            if (modelImplementedEntities == null) {
                throw new java.lang.NullPointerException("modelImplementedEntities cannot be null");
            }
            if (this.modelImplementedEntities == null) this.modelImplementedEntities = new java.util.ArrayList();
            this.modelImplementedEntities.addAll(modelImplementedEntities);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearModelImplementedEntities() {
            if (this.modelImplementedEntities != null) this.modelImplementedEntities.clear();
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B outputFromAirflowTask(final IAirflowTask outputFromAirflowTask) {
            if (this.outputFromAirflowTasks == null) this.outputFromAirflowTasks = new java.util.ArrayList();
            this.outputFromAirflowTasks.add(outputFromAirflowTask);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B outputFromAirflowTasks(final java.util.Collection outputFromAirflowTasks) {
            if (outputFromAirflowTasks == null) {
                throw new java.lang.NullPointerException("outputFromAirflowTasks cannot be null");
            }
            if (this.outputFromAirflowTasks == null) this.outputFromAirflowTasks = new java.util.ArrayList();
            this.outputFromAirflowTasks.addAll(outputFromAirflowTasks);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearOutputFromAirflowTasks() {
            if (this.outputFromAirflowTasks != null) this.outputFromAirflowTasks.clear();
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B outputFromProcess(final ILineageProcess outputFromProcess) {
            if (this.outputFromProcesses == null) this.outputFromProcesses = new java.util.ArrayList();
            this.outputFromProcesses.add(outputFromProcess);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B outputFromProcesses(final java.util.Collection outputFromProcesses) {
            if (outputFromProcesses == null) {
                throw new java.lang.NullPointerException("outputFromProcesses cannot be null");
            }
            if (this.outputFromProcesses == null) this.outputFromProcesses = new java.util.ArrayList();
            this.outputFromProcesses.addAll(outputFromProcesses);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearOutputFromProcesses() {
            if (this.outputFromProcesses != null) this.outputFromProcesses.clear();
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B outputFromSparkJob(final ISparkJob outputFromSparkJob) {
            if (this.outputFromSparkJobs == null) this.outputFromSparkJobs = new java.util.ArrayList();
            this.outputFromSparkJobs.add(outputFromSparkJob);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B outputFromSparkJobs(final java.util.Collection outputFromSparkJobs) {
            if (outputFromSparkJobs == null) {
                throw new java.lang.NullPointerException("outputFromSparkJobs cannot be null");
            }
            if (this.outputFromSparkJobs == null) this.outputFromSparkJobs = new java.util.ArrayList();
            this.outputFromSparkJobs.addAll(outputFromSparkJobs);
            return self();
        }

        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public B clearOutputFromSparkJobs() {
            if (this.outputFromSparkJobs != null) this.outputFromSparkJobs.clear();
            return self();
        }

        @java.lang.Override
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        protected abstract B self();

        @java.lang.Override
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public abstract C build();

        @java.lang.Override
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public java.lang.String toString() {
            return "KafkaTopic.KafkaTopicBuilder(super=" + super.toString() + ", typeName$value=" + this.typeName$value + ", inputToAirflowTasks=" + this.inputToAirflowTasks + ", inputToProcesses=" + this.inputToProcesses + ", inputToSparkJobs=" + this.inputToSparkJobs + ", kafkaConsumerGroups=" + this.kafkaConsumerGroups + ", kafkaTopicCleanupPolicy=" + this.kafkaTopicCleanupPolicy + ", kafkaTopicCompressionType=" + this.kafkaTopicCompressionType + ", kafkaTopicIsInternal=" + this.kafkaTopicIsInternal + ", kafkaTopicPartitionsCount=" + this.kafkaTopicPartitionsCount + ", kafkaTopicRecordCount=" + this.kafkaTopicRecordCount + ", kafkaTopicReplicationFactor=" + this.kafkaTopicReplicationFactor + ", kafkaTopicRetentionTimeInMs=" + this.kafkaTopicRetentionTimeInMs + ", kafkaTopicSegmentBytes=" + this.kafkaTopicSegmentBytes + ", kafkaTopicSizeInBytes=" + this.kafkaTopicSizeInBytes + ", modelImplementedEntities=" + this.modelImplementedEntities + ", outputFromAirflowTasks=" + this.outputFromAirflowTasks + ", outputFromProcesses=" + this.outputFromProcesses + ", outputFromSparkJobs=" + this.outputFromSparkJobs + ")";
        }
    }


    /**
     * Concrete builder implementation closing the SuperBuilder recursion for {@link KafkaTopic}.
     * Type arguments were reconstructed after being stripped by extraction.
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    private static final class KafkaTopicBuilderImpl extends KafkaTopic.KafkaTopicBuilder<KafkaTopic, KafkaTopic.KafkaTopicBuilderImpl> {
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        private KafkaTopicBuilderImpl() {
        }

        @java.lang.Override
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        protected KafkaTopic.KafkaTopicBuilderImpl self() {
            return this;
        }

        @java.lang.Override
        @java.lang.SuppressWarnings("all")
        @lombok.Generated
        public KafkaTopic build() {
            return new KafkaTopic(this);
        }
    }

    /**
     * Construct a KafkaTopic from its builder: apply the default typeName when none was set,
     * and snapshot every relationship collection into an unmodifiable SortedSet.
     * Generic types were reconstructed after being stripped by extraction.
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    protected KafkaTopic(final KafkaTopic.KafkaTopicBuilder<?, ?> b) {
        super(b);
        if (b.typeName$set) this.typeName = b.typeName$value;
         else this.typeName = KafkaTopic.$default$typeName();
        java.util.SortedSet<IAirflowTask> inputToAirflowTasks = new java.util.TreeSet<IAirflowTask>();
        if (b.inputToAirflowTasks != null) inputToAirflowTasks.addAll(b.inputToAirflowTasks);
        inputToAirflowTasks = java.util.Collections.unmodifiableSortedSet(inputToAirflowTasks);
        this.inputToAirflowTasks = inputToAirflowTasks;
        java.util.SortedSet<ILineageProcess> inputToProcesses = new java.util.TreeSet<ILineageProcess>();
        if (b.inputToProcesses != null) inputToProcesses.addAll(b.inputToProcesses);
        inputToProcesses = java.util.Collections.unmodifiableSortedSet(inputToProcesses);
        this.inputToProcesses = inputToProcesses;
        java.util.SortedSet<ISparkJob> inputToSparkJobs = new java.util.TreeSet<ISparkJob>();
        if (b.inputToSparkJobs != null) inputToSparkJobs.addAll(b.inputToSparkJobs);
        inputToSparkJobs = java.util.Collections.unmodifiableSortedSet(inputToSparkJobs);
        this.inputToSparkJobs = inputToSparkJobs;
        java.util.SortedSet<IKafkaConsumerGroup> kafkaConsumerGroups = new java.util.TreeSet<IKafkaConsumerGroup>();
        if (b.kafkaConsumerGroups != null) kafkaConsumerGroups.addAll(b.kafkaConsumerGroups);
        kafkaConsumerGroups = java.util.Collections.unmodifiableSortedSet(kafkaConsumerGroups);
        this.kafkaConsumerGroups = kafkaConsumerGroups;
        this.kafkaTopicCleanupPolicy = b.kafkaTopicCleanupPolicy;
        this.kafkaTopicCompressionType = b.kafkaTopicCompressionType;
        this.kafkaTopicIsInternal = b.kafkaTopicIsInternal;
        this.kafkaTopicPartitionsCount = b.kafkaTopicPartitionsCount;
        this.kafkaTopicRecordCount = b.kafkaTopicRecordCount;
        this.kafkaTopicReplicationFactor = b.kafkaTopicReplicationFactor;
        this.kafkaTopicRetentionTimeInMs = b.kafkaTopicRetentionTimeInMs;
        this.kafkaTopicSegmentBytes = b.kafkaTopicSegmentBytes;
        this.kafkaTopicSizeInBytes = b.kafkaTopicSizeInBytes;
        java.util.SortedSet<IModelEntity> modelImplementedEntities = new java.util.TreeSet<IModelEntity>();
        if (b.modelImplementedEntities != null) modelImplementedEntities.addAll(b.modelImplementedEntities);
        modelImplementedEntities = java.util.Collections.unmodifiableSortedSet(modelImplementedEntities);
        this.modelImplementedEntities = modelImplementedEntities;
        java.util.SortedSet<IAirflowTask> outputFromAirflowTasks = new java.util.TreeSet<IAirflowTask>();
        if (b.outputFromAirflowTasks != null) outputFromAirflowTasks.addAll(b.outputFromAirflowTasks);
        outputFromAirflowTasks = java.util.Collections.unmodifiableSortedSet(outputFromAirflowTasks);
        this.outputFromAirflowTasks = outputFromAirflowTasks;
        java.util.SortedSet<ILineageProcess> outputFromProcesses = new java.util.TreeSet<ILineageProcess>();
        if (b.outputFromProcesses != null) outputFromProcesses.addAll(b.outputFromProcesses);
        outputFromProcesses = java.util.Collections.unmodifiableSortedSet(outputFromProcesses);
        this.outputFromProcesses = outputFromProcesses;
        java.util.SortedSet<ISparkJob> outputFromSparkJobs = new java.util.TreeSet<ISparkJob>();
        if (b.outputFromSparkJobs != null) outputFromSparkJobs.addAll(b.outputFromSparkJobs);
        outputFromSparkJobs = java.util.Collections.unmodifiableSortedSet(outputFromSparkJobs);
        this.outputFromSparkJobs = outputFromSparkJobs;
    }

    /**
     * Start a fresh builder for a KafkaTopic (internal SDK entry point).
     *
     * @return a new, empty KafkaTopic builder
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public static KafkaTopic.KafkaTopicBuilder<?, ?> _internal() {
        return new KafkaTopic.KafkaTopicBuilderImpl();
    }

    /**
     * Create a builder pre-populated with every attribute of this KafkaTopic.
     *
     * @return a builder whose state mirrors this instance
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public KafkaTopic.KafkaTopicBuilder<?, ?> toBuilder() {
        return new KafkaTopic.KafkaTopicBuilderImpl().$fillValuesFrom(this);
    }

    /**
     * Tasks to which this asset provides input.
     *
     * @return the related Airflow tasks (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<IAirflowTask> getInputToAirflowTasks() {
        return this.inputToAirflowTasks;
    }

    /**
     * Processes to which this asset provides input.
     *
     * @return the related lineage processes (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<ILineageProcess> getInputToProcesses() {
        return this.inputToProcesses;
    }

    /**
     * Spark jobs to which this asset provides input.
     *
     * @return the related Spark jobs (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<ISparkJob> getInputToSparkJobs() {
        return this.inputToSparkJobs;
    }

    /**
     * Consumer groups subscribed to this topic.
     *
     * @return the subscribed consumer groups (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<IKafkaConsumerGroup> getKafkaConsumerGroups() {
        return this.kafkaConsumerGroups;
    }

    /**
     * Cleanup policy for this topic.
     *
     * @return the configured cleanup policy, or null if not set
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public KafkaTopicCleanupPolicy getKafkaTopicCleanupPolicy() {
        return this.kafkaTopicCleanupPolicy;
    }

    /**
     * Type of compression used for this topic.
     *
     * @return the configured compression type, or null if not set
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public KafkaTopicCompressionType getKafkaTopicCompressionType() {
        return this.kafkaTopicCompressionType;
    }

    /**
     * Whether this topic is an internal topic (true) or not (false).
     *
     * @return the internal-topic flag, or null if unknown
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public Boolean getKafkaTopicIsInternal() {
        return this.kafkaTopicIsInternal;
    }

    /**
     * Number of partitions for this topic.
     *
     * @return the partition count, or null if unknown
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public Long getKafkaTopicPartitionsCount() {
        return this.kafkaTopicPartitionsCount;
    }

    /**
     * Number of (unexpired) messages in this topic.
     *
     * @return the record count, or null if unknown
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public Long getKafkaTopicRecordCount() {
        return this.kafkaTopicRecordCount;
    }

    /**
     * Replication factor for this topic.
     *
     * @return the replication factor, or null if unknown
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public Long getKafkaTopicReplicationFactor() {
        return this.kafkaTopicReplicationFactor;
    }

    /**
     * Amount of time messages will be retained in this topic, in milliseconds.
     *
     * @return the retention time in milliseconds, or null if unknown
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public Long getKafkaTopicRetentionTimeInMs() {
        return this.kafkaTopicRetentionTimeInMs;
    }

    /**
     * Segment size for this topic.
     *
     * @return the segment size in bytes, or null if unknown
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public Long getKafkaTopicSegmentBytes() {
        return this.kafkaTopicSegmentBytes;
    }

    /**
     * Size of this topic, in bytes.
     *
     * @return the topic size in bytes, or null if unknown
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public Long getKafkaTopicSizeInBytes() {
        return this.kafkaTopicSizeInBytes;
    }

    /**
     * Entities implemented by this asset.
     *
     * @return the implemented model entities (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<IModelEntity> getModelImplementedEntities() {
        return this.modelImplementedEntities;
    }

    /**
     * Tasks from which this asset is output.
     *
     * @return the related Airflow tasks (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<IAirflowTask> getOutputFromAirflowTasks() {
        return this.outputFromAirflowTasks;
    }

    /**
     * Processes from which this asset is produced as output.
     *
     * @return the related lineage processes (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<ILineageProcess> getOutputFromProcesses() {
        return this.outputFromProcesses;
    }

    /**
     * Spark jobs from which this asset is output.
     *
     * @return the related Spark jobs (unmodifiable when built via the builder)
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SortedSet<ISparkJob> getOutputFromSparkJobs() {
        return this.outputFromSparkJobs;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public boolean equals(final java.lang.Object o) {
        // Identity, type, symmetry (canEqual) and superclass checks first,
        // mirroring the standard Lombok-generated equals contract.
        if (o == this) return true;
        if (!(o instanceof KafkaTopic)) return false;
        final KafkaTopic other = (KafkaTopic) o;
        if (!other.canEqual((java.lang.Object) this)) return false;
        if (!super.equals(o)) return false;
        // Compare each attribute null-safely, bailing out on the first mismatch.
        // Getters are evaluated lazily in the same order as the generated original.
        if (!java.util.Objects.equals(this.getKafkaTopicIsInternal(), other.getKafkaTopicIsInternal())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicPartitionsCount(), other.getKafkaTopicPartitionsCount())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicRecordCount(), other.getKafkaTopicRecordCount())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicReplicationFactor(), other.getKafkaTopicReplicationFactor())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicRetentionTimeInMs(), other.getKafkaTopicRetentionTimeInMs())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicSegmentBytes(), other.getKafkaTopicSegmentBytes())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicSizeInBytes(), other.getKafkaTopicSizeInBytes())) return false;
        if (!java.util.Objects.equals(this.getTypeName(), other.getTypeName())) return false;
        if (!java.util.Objects.equals(this.getInputToAirflowTasks(), other.getInputToAirflowTasks())) return false;
        if (!java.util.Objects.equals(this.getInputToProcesses(), other.getInputToProcesses())) return false;
        if (!java.util.Objects.equals(this.getInputToSparkJobs(), other.getInputToSparkJobs())) return false;
        if (!java.util.Objects.equals(this.getKafkaConsumerGroups(), other.getKafkaConsumerGroups())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicCleanupPolicy(), other.getKafkaTopicCleanupPolicy())) return false;
        if (!java.util.Objects.equals(this.getKafkaTopicCompressionType(), other.getKafkaTopicCompressionType())) return false;
        if (!java.util.Objects.equals(this.getModelImplementedEntities(), other.getModelImplementedEntities())) return false;
        if (!java.util.Objects.equals(this.getOutputFromAirflowTasks(), other.getOutputFromAirflowTasks())) return false;
        if (!java.util.Objects.equals(this.getOutputFromProcesses(), other.getOutputFromProcesses())) return false;
        if (!java.util.Objects.equals(this.getOutputFromSparkJobs(), other.getOutputFromSparkJobs())) return false;
        return true;
    }

    /**
     * Symmetry guard for {@link #equals(Object)}: only another {@code KafkaTopic}
     * (or a subclass that does not override this) may compare equal to this instance.
     *
     * @param other the object being compared against this one
     * @return true if {@code other} is an instance of {@code KafkaTopic}
     */
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    protected boolean canEqual(final java.lang.Object other) {
        return other instanceof KafkaTopic;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public int hashCode() {
        // Lombok's multiplier and null-sentinel; kept so hashes match the generated original.
        final int PRIME = 59;
        final int NULL_HASH = 43;
        // The original evaluates every getter unconditionally, so collecting the
        // attribute values up front is trace-equivalent. Order matches equals().
        final java.lang.Object[] attributes = {
            this.getKafkaTopicIsInternal(),
            this.getKafkaTopicPartitionsCount(),
            this.getKafkaTopicRecordCount(),
            this.getKafkaTopicReplicationFactor(),
            this.getKafkaTopicRetentionTimeInMs(),
            this.getKafkaTopicSegmentBytes(),
            this.getKafkaTopicSizeInBytes(),
            this.getTypeName(),
            this.getInputToAirflowTasks(),
            this.getInputToProcesses(),
            this.getInputToSparkJobs(),
            this.getKafkaConsumerGroups(),
            this.getKafkaTopicCleanupPolicy(),
            this.getKafkaTopicCompressionType(),
            this.getModelImplementedEntities(),
            this.getOutputFromAirflowTasks(),
            this.getOutputFromProcesses(),
            this.getOutputFromSparkJobs()
        };
        // Fold each attribute into the running hash, starting from the superclass hash.
        int result = super.hashCode();
        for (final java.lang.Object attribute : attributes) {
            result = result * PRIME + (attribute == null ? NULL_HASH : attribute.hashCode());
        }
        return result;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public java.lang.String toString() {
        // Build the Lombok-style "KafkaTopic(key=value, ...)" rendering incrementally.
        // StringBuilder.append(Object) prints null as "null", identical to string
        // concatenation, so output is byte-for-byte the same as the original.
        final StringBuilder sb = new StringBuilder("KafkaTopic(super=");
        sb.append(super.toString());
        sb.append(", typeName=").append(this.getTypeName());
        sb.append(", inputToAirflowTasks=").append(this.getInputToAirflowTasks());
        sb.append(", inputToProcesses=").append(this.getInputToProcesses());
        sb.append(", inputToSparkJobs=").append(this.getInputToSparkJobs());
        sb.append(", kafkaConsumerGroups=").append(this.getKafkaConsumerGroups());
        sb.append(", kafkaTopicCleanupPolicy=").append(this.getKafkaTopicCleanupPolicy());
        sb.append(", kafkaTopicCompressionType=").append(this.getKafkaTopicCompressionType());
        sb.append(", kafkaTopicIsInternal=").append(this.getKafkaTopicIsInternal());
        sb.append(", kafkaTopicPartitionsCount=").append(this.getKafkaTopicPartitionsCount());
        sb.append(", kafkaTopicRecordCount=").append(this.getKafkaTopicRecordCount());
        sb.append(", kafkaTopicReplicationFactor=").append(this.getKafkaTopicReplicationFactor());
        sb.append(", kafkaTopicRetentionTimeInMs=").append(this.getKafkaTopicRetentionTimeInMs());
        sb.append(", kafkaTopicSegmentBytes=").append(this.getKafkaTopicSegmentBytes());
        sb.append(", kafkaTopicSizeInBytes=").append(this.getKafkaTopicSizeInBytes());
        sb.append(", modelImplementedEntities=").append(this.getModelImplementedEntities());
        sb.append(", outputFromAirflowTasks=").append(this.getOutputFromAirflowTasks());
        sb.append(", outputFromProcesses=").append(this.getOutputFromProcesses());
        sb.append(", outputFromSparkJobs=").append(this.getOutputFromSparkJobs());
        sb.append(")");
        return sb.toString();
    }

    /**
     * Fixed typeName for KafkaTopics.
     *
     * @return the value of the {@code typeName} field (the constant
     *         {@link #TYPE_NAME} unless explicitly overwritten via a builder/setter)
     */
    @Override
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public String getTypeName() {
        return this.typeName;
    }
}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy