// Generated by delombok at Wed Oct 16 22:16:03 UTC 2024
/* SPDX-License-Identifier: Apache-2.0
Copyright 2022 Atlan Pte. Ltd. */
package com.atlan.model.assets;
import com.atlan.Atlan;
import com.atlan.AtlanClient;
import com.atlan.exception.AtlanException;
import com.atlan.exception.ErrorCode;
import com.atlan.exception.InvalidRequestException;
import com.atlan.exception.NotFoundException;
import com.atlan.model.enums.AtlanAnnouncementType;
import com.atlan.model.enums.CertificateStatus;
import com.atlan.model.enums.OpenLineageRunState;
import com.atlan.model.relations.Reference;
import com.atlan.model.relations.UniqueAttributes;
import com.atlan.model.search.FluentSearch;
import com.atlan.util.StringUtils;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.concurrent.ThreadLocalRandom;
import javax.annotation.processing.Generated;
import lombok.*;
/**
* Instance of a Spark Job run in Atlan.
*/
@Generated("com.atlan.generators.ModelGeneratorV2")
public class SparkJob extends Asset implements ISparkJob, ISpark, ICatalog, IAsset, IReferenceable {
@java.lang.SuppressWarnings("all")
@lombok.Generated
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(SparkJob.class);
private static final long serialVersionUID = 2L;
public static final String TYPE_NAME = "SparkJob";
// NOTE(review): the generic type parameters below were stripped from the generated source
// (an HTML-unescaping artifact ate every "<...>"); they are restored here from the attribute
// semantics and the builder/setter signatures — verify against upstream Atlan SDK output.
/**
 * Fixed typeName for SparkJobs.
 */
String typeName;
/**
 * Tasks to which this asset provides input.
 */
@Attribute
SortedSet<IAirflowTask> inputToAirflowTasks;
/**
 * Processes to which this asset provides input.
 */
@Attribute
SortedSet<ILineageProcess> inputToProcesses;
/**
 * TBC
 */
@Attribute
SortedSet<ISparkJob> inputToSparkJobs;
/**
 * Assets that are inputs to this task.
 */
@Attribute
SortedSet<ICatalog> inputs;
/**
 * Entities implemented by this asset.
 */
@Attribute
@JsonProperty("modelEntityImplemented")
SortedSet<IModelEntity> modelImplementedEntities;
/**
 * Tasks from which this asset is output.
 */
@Attribute
SortedSet<IAirflowTask> outputFromAirflowTasks;
/**
 * Processes from which this asset is produced as output.
 */
@Attribute
SortedSet<ILineageProcess> outputFromProcesses;
/**
 * TBC
 */
@Attribute
SortedSet<ISparkJob> outputFromSparkJobs;
/**
 * Assets that are outputs from this task.
 */
@Attribute
SortedSet<ICatalog> outputs;
/**
 * TBC
 */
@Attribute
ILineageProcess process;
/**
 * Name of the Spark app containing this Spark Job, e.g. extract_raw_data.
 */
@Attribute
String sparkAppName;
/**
 * The Spark master URL, e.g. local, local[4], or spark://master:7077.
 */
@Attribute
String sparkMaster;
/**
 * End time of the Spark Job run, e.g. 1695673598218.
 */
@Attribute
@Date
Long sparkRunEndTime;
/**
 * OpenLineage state of the Spark Job run, e.g. COMPLETE.
 */
@Attribute
OpenLineageRunState sparkRunOpenLineageState;
/**
 * OpenLineage version of the Spark Job run, e.g. 1.1.0.
 */
@Attribute
String sparkRunOpenLineageVersion;
/**
 * Start time of the Spark Job run, e.g. 1695673598218.
 */
@Attribute
@Date
Long sparkRunStartTime;
/**
 * Spark version for the Spark Job run, e.g. 3.4.1.
 */
@Attribute
String sparkRunVersion;
/**
 * Builds the minimal object necessary to create a relationship to a SparkJob, from a potentially
 * more-complete SparkJob object. Prefers the GUID, then the top-level qualifiedName, then the
 * qualifiedName nested inside the unique attributes.
 *
 * @return the minimal object necessary to relate to the SparkJob
 * @throws InvalidRequestException if none of guid or qualifiedName is present on this object
 */
@Override
public SparkJob trimToReference() throws InvalidRequestException {
    String guid = this.getGuid();
    if (guid != null && !guid.isEmpty()) {
        return refByGuid(guid);
    }
    String qualifiedName = this.getQualifiedName();
    if (qualifiedName != null && !qualifiedName.isEmpty()) {
        return refByQualifiedName(qualifiedName);
    }
    UniqueAttributes unique = this.getUniqueAttributes();
    if (unique != null && unique.getQualifiedName() != null && !unique.getQualifiedName().isEmpty()) {
        return refByQualifiedName(unique.getQualifiedName());
    }
    throw new InvalidRequestException(ErrorCode.MISSING_REQUIRED_RELATIONSHIP_PARAM, TYPE_NAME, "guid, qualifiedName");
}
/**
* Start a fluent search that will return all SparkJob assets.
* Additional conditions can be chained onto the returned search before any
* asset retrieval is attempted, ensuring all conditions are pushed-down for
* optimal retrieval. Only active (non-archived) SparkJob assets will be included.
*
* @return a fluent search that includes all SparkJob assets
*/
public static FluentSearch.FluentSearchBuilder, ?> select() {
return select(Atlan.getDefaultClient());
}
/**
* Start a fluent search that will return all SparkJob assets.
* Additional conditions can be chained onto the returned search before any
* asset retrieval is attempted, ensuring all conditions are pushed-down for
* optimal retrieval. Only active (non-archived) SparkJob assets will be included.
*
* @param client connectivity to the Atlan tenant from which to retrieve the assets
* @return a fluent search that includes all SparkJob assets
*/
public static FluentSearch.FluentSearchBuilder, ?> select(AtlanClient client) {
return select(client, false);
}
/**
* Start a fluent search that will return all SparkJob assets.
* Additional conditions can be chained onto the returned search before any
* asset retrieval is attempted, ensuring all conditions are pushed-down for
* optimal retrieval.
*
* @param includeArchived when true, archived (soft-deleted) SparkJobs will be included
* @return a fluent search that includes all SparkJob assets
*/
public static FluentSearch.FluentSearchBuilder, ?> select(boolean includeArchived) {
return select(Atlan.getDefaultClient(), includeArchived);
}
/**
* Start a fluent search that will return all SparkJob assets.
* Additional conditions can be chained onto the returned search before any
* asset retrieval is attempted, ensuring all conditions are pushed-down for
* optimal retrieval.
*
* @param client connectivity to the Atlan tenant from which to retrieve the assets
* @param includeArchived when true, archived (soft-deleted) SparkJobs will be included
* @return a fluent search that includes all SparkJob assets
*/
public static FluentSearch.FluentSearchBuilder, ?> select(AtlanClient client, boolean includeArchived) {
FluentSearch.FluentSearchBuilder, ?> builder = FluentSearch.builder(client).where(Asset.TYPE_NAME.eq(TYPE_NAME));
if (!includeArchived) {
builder.active();
}
return builder;
}
/**
 * Reference to a SparkJob by GUID, replacing any existing relationship.
 *
 * @param guid the GUID of the SparkJob to reference
 * @return reference to a SparkJob that can be used for defining a relationship to a SparkJob
 */
public static SparkJob refByGuid(String guid) {
    // Default semantic: replace all existing relationships with this one.
    return refByGuid(guid, Reference.SaveSemantic.REPLACE);
}
/**
 * Reference to a SparkJob by GUID, with explicit control over how the relationship
 * is saved (replaced, appended, or removed).
 *
 * @param guid the GUID of the SparkJob to reference
 * @param semantic how to save this relationship (replace all with this, append it, or remove it)
 * @return reference to a SparkJob that can be used for defining a relationship to a SparkJob
 */
public static SparkJob refByGuid(String guid, Reference.SaveSemantic semantic) {
    return SparkJob._internal()
            .guid(guid)
            .semantic(semantic)
            .build();
}
/**
 * Reference to a SparkJob by qualifiedName, replacing any existing relationship.
 *
 * @param qualifiedName the qualifiedName of the SparkJob to reference
 * @return reference to a SparkJob that can be used for defining a relationship to a SparkJob
 */
public static SparkJob refByQualifiedName(String qualifiedName) {
    // Default semantic: replace all existing relationships with this one.
    return refByQualifiedName(qualifiedName, Reference.SaveSemantic.REPLACE);
}
/**
 * Reference to a SparkJob by qualifiedName, with explicit control over how the relationship
 * is saved (replaced, appended, or removed).
 *
 * @param qualifiedName the qualifiedName of the SparkJob to reference
 * @param semantic how to save this relationship (replace all with this, append it, or remove it)
 * @return reference to a SparkJob that can be used for defining a relationship to a SparkJob
 */
public static SparkJob refByQualifiedName(String qualifiedName, Reference.SaveSemantic semantic) {
    UniqueAttributes unique = UniqueAttributes.builder().qualifiedName(qualifiedName).build();
    return SparkJob._internal()
            .uniqueAttributes(unique)
            .semantic(semantic)
            .build();
}
/**
 * Retrieves a SparkJob by one of its identifiers, complete with all of its relationships,
 * using the default Atlan tenant.
 *
 * @param id of the SparkJob to retrieve, either its GUID or its full qualifiedName
 * @return the requested full SparkJob, complete with all of its relationships
 * @throws AtlanException on any error during the API invocation, such as the {@link NotFoundException} if the SparkJob does not exist or the provided GUID is not a SparkJob
 */
@JsonIgnore
public static SparkJob get(String id) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return get(defaultClient, id);
}
/**
 * Retrieves a SparkJob by one of its identifiers, complete with all of its relationships.
 *
 * @param client connectivity to the Atlan tenant from which to retrieve the asset
 * @param id of the SparkJob to retrieve, either its GUID or its full qualifiedName
 * @return the requested full SparkJob, complete with all of its relationships
 * @throws AtlanException on any error during the API invocation, such as the {@link NotFoundException} if the SparkJob does not exist or the provided GUID is not a SparkJob
 */
@JsonIgnore
public static SparkJob get(AtlanClient client, String id) throws AtlanException {
    // Relationships are included by default.
    return get(client, id, true);
}
/**
 * Retrieves a SparkJob by one of its identifiers, optionally complete with all of its relationships.
 *
 * @param client connectivity to the Atlan tenant from which to retrieve the asset
 * @param id of the SparkJob to retrieve, either its GUID or its full qualifiedName
 * @param includeRelationships if true, all of the asset's relationships will also be retrieved; if false, no relationships will be retrieved
 * @return the requested full SparkJob, optionally complete with all of its relationships
 * @throws AtlanException on any error during the API invocation, such as the {@link NotFoundException} if the SparkJob does not exist or the provided GUID is not a SparkJob
 */
@JsonIgnore
public static SparkJob get(AtlanClient client, String id, boolean includeRelationships) throws AtlanException {
    if (id == null) {
        throw new NotFoundException(ErrorCode.ASSET_NOT_FOUND_BY_GUID, "(null)");
    }
    if (StringUtils.isUUID(id)) {
        // Identifier looks like a GUID: fetch directly by GUID.
        Asset asset = Asset.get(client, id, includeRelationships);
        if (asset == null) {
            throw new NotFoundException(ErrorCode.ASSET_NOT_FOUND_BY_GUID, id);
        }
        if (!(asset instanceof SparkJob)) {
            throw new NotFoundException(ErrorCode.ASSET_NOT_TYPE_REQUESTED, id, TYPE_NAME);
        }
        return (SparkJob) asset;
    }
    // Otherwise treat the identifier as a qualifiedName.
    Asset asset = Asset.get(client, TYPE_NAME, id, includeRelationships);
    if (!(asset instanceof SparkJob)) {
        // Covers both a missing asset (null) and an asset of a different type.
        throw new NotFoundException(ErrorCode.ASSET_NOT_FOUND_BY_QN, id, TYPE_NAME);
    }
    return (SparkJob) asset;
}
/**
 * Restore the archived (soft-deleted) SparkJob to active, using the default Atlan tenant.
 *
 * @param qualifiedName for the SparkJob
 * @return true if the SparkJob is now active, and false otherwise
 * @throws AtlanException on any API problems
 */
public static boolean restore(String qualifiedName) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return restore(defaultClient, qualifiedName);
}
/**
 * Restore the archived (soft-deleted) SparkJob to active.
 *
 * @param client connectivity to the Atlan tenant on which to restore the asset
 * @param qualifiedName for the SparkJob
 * @return true if the SparkJob is now active, and false otherwise
 * @throws AtlanException on any API problems
 */
public static boolean restore(AtlanClient client, String qualifiedName) throws AtlanException {
    // Delegate to the generic asset restore, scoped to the SparkJob type.
    return Asset.restore(client, TYPE_NAME, qualifiedName);
}
/**
* Builds the minimal object necessary to update a SparkJob.
*
* @param qualifiedName of the SparkJob
* @param name of the SparkJob
* @return the minimal request necessary to update the SparkJob, as a builder
*/
public static SparkJobBuilder, ?> updater(String qualifiedName, String name) {
return SparkJob._internal().guid("-" + ThreadLocalRandom.current().nextLong(0, Long.MAX_VALUE - 1)).qualifiedName(qualifiedName).name(name);
}
/**
* Builds the minimal object necessary to apply an update to a SparkJob, from a potentially
* more-complete SparkJob object.
*
* @return the minimal object necessary to update the SparkJob, as a builder
* @throws InvalidRequestException if any of the minimal set of required properties for SparkJob are not found in the initial object
*/
@Override
public SparkJobBuilder, ?> trimToRequired() throws InvalidRequestException {
Map map = new HashMap<>();
map.put("qualifiedName", this.getQualifiedName());
map.put("name", this.getName());
validateRequired(TYPE_NAME, map);
return updater(this.getQualifiedName(), this.getName());
}
/**
 * Remove the system description from a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeDescription(String qualifiedName, String name) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return removeDescription(defaultClient, qualifiedName, name);
}
/**
 * Remove the system description from a SparkJob.
 *
 * @param client connectivity to the Atlan tenant on which to remove the asset's description
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeDescription(AtlanClient client, String qualifiedName, String name) throws AtlanException {
    Asset updated = Asset.removeDescription(client, updater(qualifiedName, name));
    return (SparkJob) updated;
}
/**
 * Remove the user's description from a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeUserDescription(String qualifiedName, String name) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return removeUserDescription(defaultClient, qualifiedName, name);
}
/**
 * Remove the user's description from a SparkJob.
 *
 * @param client connectivity to the Atlan tenant on which to remove the asset's description
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeUserDescription(AtlanClient client, String qualifiedName, String name) throws AtlanException {
    Asset updated = Asset.removeUserDescription(client, updater(qualifiedName, name));
    return (SparkJob) updated;
}
/**
 * Remove the owners from a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeOwners(String qualifiedName, String name) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return removeOwners(defaultClient, qualifiedName, name);
}
/**
 * Remove the owners from a SparkJob.
 *
 * @param client connectivity to the Atlan tenant from which to remove the SparkJob's owners
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeOwners(AtlanClient client, String qualifiedName, String name) throws AtlanException {
    Asset updated = Asset.removeOwners(client, updater(qualifiedName, name));
    return (SparkJob) updated;
}
/**
 * Update the certificate on a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param certificate to use
 * @param message (optional) message, or null if no message
 * @return the updated SparkJob, or null if the update failed
 * @throws AtlanException on any API problems
 */
public static SparkJob updateCertificate(String qualifiedName, CertificateStatus certificate, String message) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return updateCertificate(defaultClient, qualifiedName, certificate, message);
}
/**
 * Update the certificate on a SparkJob.
 *
 * @param client connectivity to the Atlan tenant on which to update the SparkJob's certificate
 * @param qualifiedName of the SparkJob
 * @param certificate to use
 * @param message (optional) message, or null if no message
 * @return the updated SparkJob, or null if the update failed
 * @throws AtlanException on any API problems
 */
public static SparkJob updateCertificate(AtlanClient client, String qualifiedName, CertificateStatus certificate, String message) throws AtlanException {
    Asset updated = Asset.updateCertificate(client, _internal(), TYPE_NAME, qualifiedName, certificate, message);
    return (SparkJob) updated;
}
/**
 * Remove the certificate from a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeCertificate(String qualifiedName, String name) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return removeCertificate(defaultClient, qualifiedName, name);
}
/**
 * Remove the certificate from a SparkJob.
 *
 * @param client connectivity to the Atlan tenant from which to remove the SparkJob's certificate
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeCertificate(AtlanClient client, String qualifiedName, String name) throws AtlanException {
    Asset updated = Asset.removeCertificate(client, updater(qualifiedName, name));
    return (SparkJob) updated;
}
/**
 * Update the announcement on a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param type type of announcement to set
 * @param title (optional) title of the announcement to set (or null for no title)
 * @param message (optional) message of the announcement to set (or null for no message)
 * @return the result of the update, or null if the update failed
 * @throws AtlanException on any API problems
 */
public static SparkJob updateAnnouncement(String qualifiedName, AtlanAnnouncementType type, String title, String message) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return updateAnnouncement(defaultClient, qualifiedName, type, title, message);
}
/**
 * Update the announcement on a SparkJob.
 *
 * @param client connectivity to the Atlan tenant on which to update the SparkJob's announcement
 * @param qualifiedName of the SparkJob
 * @param type type of announcement to set
 * @param title (optional) title of the announcement to set (or null for no title)
 * @param message (optional) message of the announcement to set (or null for no message)
 * @return the result of the update, or null if the update failed
 * @throws AtlanException on any API problems
 */
public static SparkJob updateAnnouncement(AtlanClient client, String qualifiedName, AtlanAnnouncementType type, String title, String message) throws AtlanException {
    Asset updated = Asset.updateAnnouncement(client, _internal(), TYPE_NAME, qualifiedName, type, title, message);
    return (SparkJob) updated;
}
/**
 * Remove the announcement from a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeAnnouncement(String qualifiedName, String name) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    return removeAnnouncement(defaultClient, qualifiedName, name);
}
/**
 * Remove the announcement from a SparkJob.
 *
 * @param client connectivity to the Atlan tenant from which to remove the SparkJob's announcement
 * @param qualifiedName of the SparkJob
 * @param name of the SparkJob
 * @return the updated SparkJob, or null if the removal failed
 * @throws AtlanException on any API problems
 */
public static SparkJob removeAnnouncement(AtlanClient client, String qualifiedName, String name) throws AtlanException {
    Asset updated = Asset.removeAnnouncement(client, updater(qualifiedName, name));
    return (SparkJob) updated;
}
/**
 * Replace the terms linked to the SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName for the SparkJob
 * @param name human-readable name of the SparkJob
 * @param terms the list of terms to replace on the SparkJob, or null to remove all terms from the SparkJob
 * @return the SparkJob that was updated (note that it will NOT contain details of the replaced terms)
 * @throws AtlanException on any API problems
 */
// FIX(review): restored the stripped <IGlossaryTerm> element type on the terms parameter
// (raw List left the signature unchecked) — verify against the upstream Atlan SDK.
public static SparkJob replaceTerms(String qualifiedName, String name, List<IGlossaryTerm> terms) throws AtlanException {
    return replaceTerms(Atlan.getDefaultClient(), qualifiedName, name, terms);
}
/**
 * Replace the terms linked to the SparkJob.
 *
 * @param client connectivity to the Atlan tenant on which to replace the SparkJob's assigned terms
 * @param qualifiedName for the SparkJob
 * @param name human-readable name of the SparkJob
 * @param terms the list of terms to replace on the SparkJob, or null to remove all terms from the SparkJob
 * @return the SparkJob that was updated (note that it will NOT contain details of the replaced terms)
 * @throws AtlanException on any API problems
 */
// FIX(review): restored the stripped <IGlossaryTerm> element type on the terms parameter.
public static SparkJob replaceTerms(AtlanClient client, String qualifiedName, String name, List<IGlossaryTerm> terms) throws AtlanException {
    return (SparkJob) Asset.replaceTerms(client, updater(qualifiedName, name), terms);
}
/**
 * Link additional terms to the SparkJob, without replacing existing terms linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing terms,
 * and a second to append the new terms.
 *
 * @param qualifiedName for the SparkJob
 * @param terms the list of terms to append to the SparkJob
 * @return the SparkJob that was updated (note that it will NOT contain details of the appended terms)
 * @throws AtlanException on any API problems
 */
// FIX(review): restored the stripped <IGlossaryTerm> element type on the terms parameter.
public static SparkJob appendTerms(String qualifiedName, List<IGlossaryTerm> terms) throws AtlanException {
    return appendTerms(Atlan.getDefaultClient(), qualifiedName, terms);
}
/**
 * Link additional terms to the SparkJob, without replacing existing terms linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing terms,
 * and a second to append the new terms.
 *
 * @param client connectivity to the Atlan tenant on which to append terms to the SparkJob
 * @param qualifiedName for the SparkJob
 * @param terms the list of terms to append to the SparkJob
 * @return the SparkJob that was updated (note that it will NOT contain details of the appended terms)
 * @throws AtlanException on any API problems
 */
// FIX(review): restored the stripped <IGlossaryTerm> element type on the terms parameter.
public static SparkJob appendTerms(AtlanClient client, String qualifiedName, List<IGlossaryTerm> terms) throws AtlanException {
    return (SparkJob) Asset.appendTerms(client, TYPE_NAME, qualifiedName, terms);
}
/**
 * Remove terms from a SparkJob, without replacing all existing terms linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing terms,
 * and a second to remove the provided terms.
 *
 * @param qualifiedName for the SparkJob
 * @param terms the list of terms to remove from the SparkJob, which must be referenced by GUID
 * @return the SparkJob that was updated (note that it will NOT contain details of the resulting terms)
 * @throws AtlanException on any API problems
 */
// FIX(review): restored the stripped <IGlossaryTerm> element type on the terms parameter.
public static SparkJob removeTerms(String qualifiedName, List<IGlossaryTerm> terms) throws AtlanException {
    return removeTerms(Atlan.getDefaultClient(), qualifiedName, terms);
}
/**
 * Remove terms from a SparkJob, without replacing all existing terms linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing terms,
 * and a second to remove the provided terms.
 *
 * @param client connectivity to the Atlan tenant from which to remove terms from the SparkJob
 * @param qualifiedName for the SparkJob
 * @param terms the list of terms to remove from the SparkJob, which must be referenced by GUID
 * @return the SparkJob that was updated (note that it will NOT contain details of the resulting terms)
 * @throws AtlanException on any API problems
 */
// FIX(review): restored the stripped <IGlossaryTerm> element type on the terms parameter.
public static SparkJob removeTerms(AtlanClient client, String qualifiedName, List<IGlossaryTerm> terms) throws AtlanException {
    return (SparkJob) Asset.removeTerms(client, TYPE_NAME, qualifiedName, terms);
}
/**
 * Add Atlan tags to a SparkJob, without replacing existing Atlan tags linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing Atlan tags,
 * and a second to append the new Atlan tags.
 *
 * @param qualifiedName of the SparkJob
 * @param atlanTagNames human-readable names of the Atlan tags to add
 * @throws AtlanException on any API problems
 * @return the updated SparkJob
 */
// FIX(review): restored the stripped <String> element type (the names are human-readable
// strings, per the javadoc) — raw List left the signature unchecked.
public static SparkJob appendAtlanTags(String qualifiedName, List<String> atlanTagNames) throws AtlanException {
    return appendAtlanTags(Atlan.getDefaultClient(), qualifiedName, atlanTagNames);
}
/**
 * Add Atlan tags to a SparkJob, without replacing existing Atlan tags linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing Atlan tags,
 * and a second to append the new Atlan tags.
 *
 * @param client connectivity to the Atlan tenant on which to append Atlan tags to the SparkJob
 * @param qualifiedName of the SparkJob
 * @param atlanTagNames human-readable names of the Atlan tags to add
 * @throws AtlanException on any API problems
 * @return the updated SparkJob
 */
// FIX(review): restored the stripped <String> element type on atlanTagNames.
public static SparkJob appendAtlanTags(AtlanClient client, String qualifiedName, List<String> atlanTagNames) throws AtlanException {
    return (SparkJob) Asset.appendAtlanTags(client, TYPE_NAME, qualifiedName, atlanTagNames);
}
/**
 * Add Atlan tags to a SparkJob, without replacing existing Atlan tags linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing Atlan tags,
 * and a second to append the new Atlan tags.
 *
 * @param qualifiedName of the SparkJob
 * @param atlanTagNames human-readable names of the Atlan tags to add
 * @param propagate whether to propagate the Atlan tag (true) or not (false)
 * @param removePropagationsOnDelete whether to remove the propagated Atlan tags when the Atlan tag is removed from this asset (true) or not (false)
 * @param restrictLineagePropagation whether to avoid propagating through lineage (true) or do propagate through lineage (false)
 * @throws AtlanException on any API problems
 * @return the updated SparkJob
 */
// FIX(review): restored the stripped <String> element type on atlanTagNames.
public static SparkJob appendAtlanTags(String qualifiedName, List<String> atlanTagNames, boolean propagate, boolean removePropagationsOnDelete, boolean restrictLineagePropagation) throws AtlanException {
    return appendAtlanTags(Atlan.getDefaultClient(), qualifiedName, atlanTagNames, propagate, removePropagationsOnDelete, restrictLineagePropagation);
}
/**
 * Add Atlan tags to a SparkJob, without replacing existing Atlan tags linked to the SparkJob.
 * Note: this operation must make two API calls — one to retrieve the SparkJob's existing Atlan tags,
 * and a second to append the new Atlan tags.
 *
 * @param client connectivity to the Atlan tenant on which to append Atlan tags to the SparkJob
 * @param qualifiedName of the SparkJob
 * @param atlanTagNames human-readable names of the Atlan tags to add
 * @param propagate whether to propagate the Atlan tag (true) or not (false)
 * @param removePropagationsOnDelete whether to remove the propagated Atlan tags when the Atlan tag is removed from this asset (true) or not (false)
 * @param restrictLineagePropagation whether to avoid propagating through lineage (true) or do propagate through lineage (false)
 * @throws AtlanException on any API problems
 * @return the updated SparkJob
 */
// FIX(review): restored the stripped <String> element type on atlanTagNames.
public static SparkJob appendAtlanTags(AtlanClient client, String qualifiedName, List<String> atlanTagNames, boolean propagate, boolean removePropagationsOnDelete, boolean restrictLineagePropagation) throws AtlanException {
    return (SparkJob) Asset.appendAtlanTags(client, TYPE_NAME, qualifiedName, atlanTagNames, propagate, removePropagationsOnDelete, restrictLineagePropagation);
}
/**
 * Remove an Atlan tag from a SparkJob, using the default Atlan tenant.
 *
 * @param qualifiedName of the SparkJob
 * @param atlanTagName human-readable name of the Atlan tag to remove
 * @throws AtlanException on any API problems, or if the Atlan tag does not exist on the SparkJob
 */
public static void removeAtlanTag(String qualifiedName, String atlanTagName) throws AtlanException {
    AtlanClient defaultClient = Atlan.getDefaultClient();
    removeAtlanTag(defaultClient, qualifiedName, atlanTagName);
}
/**
 * Remove an Atlan tag from a SparkJob.
 *
 * @param client connectivity to the Atlan tenant from which to remove an Atlan tag from a SparkJob
 * @param qualifiedName of the SparkJob
 * @param atlanTagName human-readable name of the Atlan tag to remove
 * @throws AtlanException on any API problems, or if the Atlan tag does not exist on the SparkJob
 */
public static void removeAtlanTag(AtlanClient client, String qualifiedName, String atlanTagName) throws AtlanException {
    // Delegate to the generic asset operation, scoped to the SparkJob type.
    Asset.removeAtlanTag(client, TYPE_NAME, qualifiedName, atlanTagName);
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
// Delomboked @Builder.Default support: supplies the default typeName ("SparkJob")
// when the builder does not set one explicitly.
private static String $default$typeName() {
    return TYPE_NAME;
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public static abstract class SparkJobBuilder> extends Asset.AssetBuilder {
// NOTE(review): the generic element types on the ArrayList fields below were stripped from
// the generated source (an HTML-unescaping artifact); restored here to mirror the entity
// fields they back — verify against upstream Atlan SDK generator output.
@java.lang.SuppressWarnings("all")
@lombok.Generated
private boolean typeName$set;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private String typeName$value;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<IAirflowTask> inputToAirflowTasks;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<ILineageProcess> inputToProcesses;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<ISparkJob> inputToSparkJobs;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<ICatalog> inputs;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<IModelEntity> modelImplementedEntities;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<IAirflowTask> outputFromAirflowTasks;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<ILineageProcess> outputFromProcesses;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<ISparkJob> outputFromSparkJobs;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private java.util.ArrayList<ICatalog> outputs;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private ILineageProcess process;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private String sparkAppName;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private String sparkMaster;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private Long sparkRunEndTime;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private OpenLineageRunState sparkRunOpenLineageState;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private String sparkRunOpenLineageVersion;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private Long sparkRunStartTime;
@java.lang.SuppressWarnings("all")
@lombok.Generated
private String sparkRunVersion;
@java.lang.Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
// Delomboked toBuilder support: copies superclass (Asset) values first, then all
// SparkJob-specific attribute values, from the given instance into this builder.
protected B $fillValuesFrom(final C instance) {
    super.$fillValuesFrom(instance);
    SparkJob.SparkJobBuilder.$fillValuesFromInstanceIntoBuilder(instance, this);
    return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
private static void $fillValuesFromInstanceIntoBuilder(final SparkJob instance, final SparkJob.SparkJobBuilder, ?> b) {
b.typeName(instance.typeName);
b.inputToAirflowTasks(instance.inputToAirflowTasks == null ? java.util.Collections.emptySortedSet() : instance.inputToAirflowTasks);
b.inputToProcesses(instance.inputToProcesses == null ? java.util.Collections.emptySortedSet() : instance.inputToProcesses);
b.inputToSparkJobs(instance.inputToSparkJobs == null ? java.util.Collections.emptySortedSet() : instance.inputToSparkJobs);
b.inputs(instance.inputs == null ? java.util.Collections.emptySortedSet() : instance.inputs);
b.modelImplementedEntities(instance.modelImplementedEntities == null ? java.util.Collections.emptySortedSet() : instance.modelImplementedEntities);
b.outputFromAirflowTasks(instance.outputFromAirflowTasks == null ? java.util.Collections.emptySortedSet() : instance.outputFromAirflowTasks);
b.outputFromProcesses(instance.outputFromProcesses == null ? java.util.Collections.emptySortedSet() : instance.outputFromProcesses);
b.outputFromSparkJobs(instance.outputFromSparkJobs == null ? java.util.Collections.emptySortedSet() : instance.outputFromSparkJobs);
b.outputs(instance.outputs == null ? java.util.Collections.emptySortedSet() : instance.outputs);
b.process(instance.process);
b.sparkAppName(instance.sparkAppName);
b.sparkMaster(instance.sparkMaster);
b.sparkRunEndTime(instance.sparkRunEndTime);
b.sparkRunOpenLineageState(instance.sparkRunOpenLineageState);
b.sparkRunOpenLineageVersion(instance.sparkRunOpenLineageVersion);
b.sparkRunStartTime(instance.sparkRunStartTime);
b.sparkRunVersion(instance.sparkRunVersion);
}
/**
 * Fixed typeName for SparkJobs.
 * @param typeName the type name to set
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B typeName(final String typeName) {
this.typeName$value = typeName;
// Mark as explicitly set so the constructor skips the generated default value.
typeName$set = true;
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B inputToAirflowTask(final IAirflowTask inputToAirflowTask) {
if (this.inputToAirflowTasks == null) this.inputToAirflowTasks = new java.util.ArrayList();
this.inputToAirflowTasks.add(inputToAirflowTask);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B inputToAirflowTasks(final java.util.Collection extends IAirflowTask> inputToAirflowTasks) {
if (inputToAirflowTasks == null) {
throw new java.lang.NullPointerException("inputToAirflowTasks cannot be null");
}
if (this.inputToAirflowTasks == null) this.inputToAirflowTasks = new java.util.ArrayList();
this.inputToAirflowTasks.addAll(inputToAirflowTasks);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearInputToAirflowTasks() {
if (this.inputToAirflowTasks != null) this.inputToAirflowTasks.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B inputToProcess(final ILineageProcess inputToProcess) {
if (this.inputToProcesses == null) this.inputToProcesses = new java.util.ArrayList();
this.inputToProcesses.add(inputToProcess);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B inputToProcesses(final java.util.Collection extends ILineageProcess> inputToProcesses) {
if (inputToProcesses == null) {
throw new java.lang.NullPointerException("inputToProcesses cannot be null");
}
if (this.inputToProcesses == null) this.inputToProcesses = new java.util.ArrayList();
this.inputToProcesses.addAll(inputToProcesses);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearInputToProcesses() {
if (this.inputToProcesses != null) this.inputToProcesses.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B inputToSparkJob(final ISparkJob inputToSparkJob) {
if (this.inputToSparkJobs == null) this.inputToSparkJobs = new java.util.ArrayList();
this.inputToSparkJobs.add(inputToSparkJob);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B inputToSparkJobs(final java.util.Collection extends ISparkJob> inputToSparkJobs) {
if (inputToSparkJobs == null) {
throw new java.lang.NullPointerException("inputToSparkJobs cannot be null");
}
if (this.inputToSparkJobs == null) this.inputToSparkJobs = new java.util.ArrayList();
this.inputToSparkJobs.addAll(inputToSparkJobs);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearInputToSparkJobs() {
if (this.inputToSparkJobs != null) this.inputToSparkJobs.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B input(final ICatalog input) {
if (this.inputs == null) this.inputs = new java.util.ArrayList();
this.inputs.add(input);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B inputs(final java.util.Collection extends ICatalog> inputs) {
if (inputs == null) {
throw new java.lang.NullPointerException("inputs cannot be null");
}
if (this.inputs == null) this.inputs = new java.util.ArrayList();
this.inputs.addAll(inputs);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearInputs() {
if (this.inputs != null) this.inputs.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B modelImplementedEntity(final IModelEntity modelImplementedEntity) {
if (this.modelImplementedEntities == null) this.modelImplementedEntities = new java.util.ArrayList();
this.modelImplementedEntities.add(modelImplementedEntity);
return self();
}
@JsonProperty("modelEntityImplemented")
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B modelImplementedEntities(final java.util.Collection extends IModelEntity> modelImplementedEntities) {
if (modelImplementedEntities == null) {
throw new java.lang.NullPointerException("modelImplementedEntities cannot be null");
}
if (this.modelImplementedEntities == null) this.modelImplementedEntities = new java.util.ArrayList();
this.modelImplementedEntities.addAll(modelImplementedEntities);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearModelImplementedEntities() {
if (this.modelImplementedEntities != null) this.modelImplementedEntities.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B outputFromAirflowTask(final IAirflowTask outputFromAirflowTask) {
if (this.outputFromAirflowTasks == null) this.outputFromAirflowTasks = new java.util.ArrayList();
this.outputFromAirflowTasks.add(outputFromAirflowTask);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B outputFromAirflowTasks(final java.util.Collection extends IAirflowTask> outputFromAirflowTasks) {
if (outputFromAirflowTasks == null) {
throw new java.lang.NullPointerException("outputFromAirflowTasks cannot be null");
}
if (this.outputFromAirflowTasks == null) this.outputFromAirflowTasks = new java.util.ArrayList();
this.outputFromAirflowTasks.addAll(outputFromAirflowTasks);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearOutputFromAirflowTasks() {
if (this.outputFromAirflowTasks != null) this.outputFromAirflowTasks.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B outputFromProcess(final ILineageProcess outputFromProcess) {
if (this.outputFromProcesses == null) this.outputFromProcesses = new java.util.ArrayList();
this.outputFromProcesses.add(outputFromProcess);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B outputFromProcesses(final java.util.Collection extends ILineageProcess> outputFromProcesses) {
if (outputFromProcesses == null) {
throw new java.lang.NullPointerException("outputFromProcesses cannot be null");
}
if (this.outputFromProcesses == null) this.outputFromProcesses = new java.util.ArrayList();
this.outputFromProcesses.addAll(outputFromProcesses);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearOutputFromProcesses() {
if (this.outputFromProcesses != null) this.outputFromProcesses.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B outputFromSparkJob(final ISparkJob outputFromSparkJob) {
if (this.outputFromSparkJobs == null) this.outputFromSparkJobs = new java.util.ArrayList();
this.outputFromSparkJobs.add(outputFromSparkJob);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B outputFromSparkJobs(final java.util.Collection extends ISparkJob> outputFromSparkJobs) {
if (outputFromSparkJobs == null) {
throw new java.lang.NullPointerException("outputFromSparkJobs cannot be null");
}
if (this.outputFromSparkJobs == null) this.outputFromSparkJobs = new java.util.ArrayList();
this.outputFromSparkJobs.addAll(outputFromSparkJobs);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearOutputFromSparkJobs() {
if (this.outputFromSparkJobs != null) this.outputFromSparkJobs.clear();
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B output(final ICatalog output) {
if (this.outputs == null) this.outputs = new java.util.ArrayList();
this.outputs.add(output);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B outputs(final java.util.Collection extends ICatalog> outputs) {
if (outputs == null) {
throw new java.lang.NullPointerException("outputs cannot be null");
}
if (this.outputs == null) this.outputs = new java.util.ArrayList();
this.outputs.addAll(outputs);
return self();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B clearOutputs() {
if (this.outputs != null) this.outputs.clear();
return self();
}
/**
 * TBC
 * @param process the lineage process to associate with this Spark job
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B process(final ILineageProcess process) {
this.process = process;
return self();
}
/**
 * Name of the Spark app containing this Spark Job For eg. extract_raw_data
 * @param sparkAppName the Spark application name
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B sparkAppName(final String sparkAppName) {
this.sparkAppName = sparkAppName;
return self();
}
/**
 * The Spark master URL eg. local, local[4], or spark://master:7077
 * @param sparkMaster the Spark master URL
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B sparkMaster(final String sparkMaster) {
this.sparkMaster = sparkMaster;
return self();
}
/**
 * End time of the Spark Job eg. 1695673598218
 * @param sparkRunEndTime epoch-millisecond end time of the run
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B sparkRunEndTime(final Long sparkRunEndTime) {
this.sparkRunEndTime = sparkRunEndTime;
return self();
}
/**
 * OpenLineage state of the Spark Job run eg. COMPLETE
 * @param sparkRunOpenLineageState the OpenLineage run state
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B sparkRunOpenLineageState(final OpenLineageRunState sparkRunOpenLineageState) {
this.sparkRunOpenLineageState = sparkRunOpenLineageState;
return self();
}
/**
 * OpenLineage Version of the Spark Job run eg. 1.1.0
 * @param sparkRunOpenLineageVersion the OpenLineage version string
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B sparkRunOpenLineageVersion(final String sparkRunOpenLineageVersion) {
this.sparkRunOpenLineageVersion = sparkRunOpenLineageVersion;
return self();
}
/**
 * Start time of the Spark Job eg. 1695673598218
 * @param sparkRunStartTime epoch-millisecond start time of the run
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B sparkRunStartTime(final Long sparkRunStartTime) {
this.sparkRunStartTime = sparkRunStartTime;
return self();
}
/**
 * Spark Version for the Spark Job run eg. 3.4.1
 * @param sparkRunVersion the Spark version string
 * @return {@code this}.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public B sparkRunVersion(final String sparkRunVersion) {
this.sparkRunVersion = sparkRunVersion;
return self();
}
// Returns this builder typed as the concrete builder subclass (Lombok @SuperBuilder protocol).
@java.lang.Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
protected abstract B self();
// Builds the concrete instance (SparkJob or a subclass) from this builder's state.
@java.lang.Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
public abstract C build();
// Diagnostic representation of the builder's current state, including superclass state.
@java.lang.Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
public java.lang.String toString() {
return "SparkJob.SparkJobBuilder(super=" + super.toString() + ", typeName$value=" + this.typeName$value + ", inputToAirflowTasks=" + this.inputToAirflowTasks + ", inputToProcesses=" + this.inputToProcesses + ", inputToSparkJobs=" + this.inputToSparkJobs + ", inputs=" + this.inputs + ", modelImplementedEntities=" + this.modelImplementedEntities + ", outputFromAirflowTasks=" + this.outputFromAirflowTasks + ", outputFromProcesses=" + this.outputFromProcesses + ", outputFromSparkJobs=" + this.outputFromSparkJobs + ", outputs=" + this.outputs + ", process=" + this.process + ", sparkAppName=" + this.sparkAppName + ", sparkMaster=" + this.sparkMaster + ", sparkRunEndTime=" + this.sparkRunEndTime + ", sparkRunOpenLineageState=" + this.sparkRunOpenLineageState + ", sparkRunOpenLineageVersion=" + this.sparkRunOpenLineageVersion + ", sparkRunStartTime=" + this.sparkRunStartTime + ", sparkRunVersion=" + this.sparkRunVersion + ")";
}
}
/**
 * Concrete builder implementation; closes the self-referential generics of the
 * abstract builder. The type arguments on {@code extends} had been stripped.
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
private static final class SparkJobBuilderImpl extends SparkJob.SparkJobBuilder<SparkJob, SparkJob.SparkJobBuilderImpl> {
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    private SparkJobBuilderImpl() {
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    protected SparkJob.SparkJobBuilderImpl self() {
        return this;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("all")
    @lombok.Generated
    public SparkJob build() {
        return new SparkJob(this);
    }
}
/**
 * Builds a SparkJob from the given builder: applies the typeName default when not
 * explicitly set, and snapshots each relationship collection into an unmodifiable
 * SortedSet (empty, never null). Parameter type repaired: wildcards had been stripped.
 * @param b builder holding the values for this instance
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
protected SparkJob(final SparkJob.SparkJobBuilder<?, ?> b) {
    super(b);
    if (b.typeName$set) this.typeName = b.typeName$value;
    else this.typeName = SparkJob.$default$typeName();
    java.util.SortedSet<IAirflowTask> inputToAirflowTasks = new java.util.TreeSet<IAirflowTask>();
    if (b.inputToAirflowTasks != null) inputToAirflowTasks.addAll(b.inputToAirflowTasks);
    this.inputToAirflowTasks = java.util.Collections.unmodifiableSortedSet(inputToAirflowTasks);
    java.util.SortedSet<ILineageProcess> inputToProcesses = new java.util.TreeSet<ILineageProcess>();
    if (b.inputToProcesses != null) inputToProcesses.addAll(b.inputToProcesses);
    this.inputToProcesses = java.util.Collections.unmodifiableSortedSet(inputToProcesses);
    java.util.SortedSet<ISparkJob> inputToSparkJobs = new java.util.TreeSet<ISparkJob>();
    if (b.inputToSparkJobs != null) inputToSparkJobs.addAll(b.inputToSparkJobs);
    this.inputToSparkJobs = java.util.Collections.unmodifiableSortedSet(inputToSparkJobs);
    java.util.SortedSet<ICatalog> inputs = new java.util.TreeSet<ICatalog>();
    if (b.inputs != null) inputs.addAll(b.inputs);
    this.inputs = java.util.Collections.unmodifiableSortedSet(inputs);
    java.util.SortedSet<IModelEntity> modelImplementedEntities = new java.util.TreeSet<IModelEntity>();
    if (b.modelImplementedEntities != null) modelImplementedEntities.addAll(b.modelImplementedEntities);
    this.modelImplementedEntities = java.util.Collections.unmodifiableSortedSet(modelImplementedEntities);
    java.util.SortedSet<IAirflowTask> outputFromAirflowTasks = new java.util.TreeSet<IAirflowTask>();
    if (b.outputFromAirflowTasks != null) outputFromAirflowTasks.addAll(b.outputFromAirflowTasks);
    this.outputFromAirflowTasks = java.util.Collections.unmodifiableSortedSet(outputFromAirflowTasks);
    java.util.SortedSet<ILineageProcess> outputFromProcesses = new java.util.TreeSet<ILineageProcess>();
    if (b.outputFromProcesses != null) outputFromProcesses.addAll(b.outputFromProcesses);
    this.outputFromProcesses = java.util.Collections.unmodifiableSortedSet(outputFromProcesses);
    java.util.SortedSet<ISparkJob> outputFromSparkJobs = new java.util.TreeSet<ISparkJob>();
    if (b.outputFromSparkJobs != null) outputFromSparkJobs.addAll(b.outputFromSparkJobs);
    this.outputFromSparkJobs = java.util.Collections.unmodifiableSortedSet(outputFromSparkJobs);
    java.util.SortedSet<ICatalog> outputs = new java.util.TreeSet<ICatalog>();
    if (b.outputs != null) outputs.addAll(b.outputs);
    this.outputs = java.util.Collections.unmodifiableSortedSet(outputs);
    this.process = b.process;
    this.sparkAppName = b.sparkAppName;
    this.sparkMaster = b.sparkMaster;
    this.sparkRunEndTime = b.sparkRunEndTime;
    this.sparkRunOpenLineageState = b.sparkRunOpenLineageState;
    this.sparkRunOpenLineageVersion = b.sparkRunOpenLineageVersion;
    this.sparkRunStartTime = b.sparkRunStartTime;
    this.sparkRunVersion = b.sparkRunVersion;
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public static SparkJob.SparkJobBuilder, ?> _internal() {
return new SparkJob.SparkJobBuilderImpl();
}
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SparkJob.SparkJobBuilder, ?> toBuilder() {
return new SparkJob.SparkJobBuilderImpl().$fillValuesFrom(this);
}
/**
 * Tasks to which this asset provides input.
 * @return the input Airflow tasks (never null once built)
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<IAirflowTask> getInputToAirflowTasks() {
    return this.inputToAirflowTasks;
}
/**
 * Processes to which this asset provides input.
 * @return the input processes
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<ILineageProcess> getInputToProcesses() {
    return this.inputToProcesses;
}
/**
 * TBC
 * @return the Spark jobs to which this asset provides input
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<ISparkJob> getInputToSparkJobs() {
    return this.inputToSparkJobs;
}
/**
 * Assets that are inputs to this task.
 * @return the input assets
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<ICatalog> getInputs() {
    return this.inputs;
}
/**
 * Entities implemented by this asset.
 * @return the implemented model entities
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<IModelEntity> getModelImplementedEntities() {
    return this.modelImplementedEntities;
}
/**
 * Tasks from which this asset is output.
 * @return the output Airflow tasks
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<IAirflowTask> getOutputFromAirflowTasks() {
    return this.outputFromAirflowTasks;
}
/**
 * Processes from which this asset is produced as output.
 * @return the output processes
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<ILineageProcess> getOutputFromProcesses() {
    return this.outputFromProcesses;
}
/**
 * TBC
 * @return the Spark jobs from which this asset is output
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<ISparkJob> getOutputFromSparkJobs() {
    return this.outputFromSparkJobs;
}
/**
 * Assets that are outputs from this task.
 * @return the output assets
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public SortedSet<ICatalog> getOutputs() {
    return this.outputs;
}
/**
 * TBC
 * @return the lineage process associated with this Spark job
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public ILineageProcess getProcess() {
return this.process;
}
/**
 * Name of the Spark app containing this Spark Job For eg. extract_raw_data
 * @return the Spark application name
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public String getSparkAppName() {
return this.sparkAppName;
}
/**
 * The Spark master URL eg. local, local[4], or spark://master:7077
 * @return the Spark master URL
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public String getSparkMaster() {
return this.sparkMaster;
}
/**
 * End time of the Spark Job eg. 1695673598218
 * @return epoch-millisecond end time of the run
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public Long getSparkRunEndTime() {
return this.sparkRunEndTime;
}
/**
 * OpenLineage state of the Spark Job run eg. COMPLETE
 * @return the OpenLineage run state
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public OpenLineageRunState getSparkRunOpenLineageState() {
return this.sparkRunOpenLineageState;
}
/**
 * OpenLineage Version of the Spark Job run eg. 1.1.0
 * @return the OpenLineage version string
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public String getSparkRunOpenLineageVersion() {
return this.sparkRunOpenLineageVersion;
}
/**
 * Start time of the Spark Job eg. 1695673598218
 * @return epoch-millisecond start time of the run
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public Long getSparkRunStartTime() {
return this.sparkRunStartTime;
}
/**
 * Spark Version for the Spark Job run eg. 3.4.1
 * @return the Spark version string
 */
@java.lang.SuppressWarnings("all")
@lombok.Generated
public String getSparkRunVersion() {
return this.sparkRunVersion;
}
/**
 * Equality based on the superclass's equality plus every SparkJob-level attribute,
 * compared via getters in the same order as the generated implementation.
 */
@java.lang.Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
public boolean equals(final java.lang.Object o) {
    if (o == this) return true;
    if (!(o instanceof SparkJob)) return false;
    final SparkJob that = (SparkJob) o;
    // canEqual keeps equals symmetric across subclasses (Lombok protocol).
    if (!that.canEqual((java.lang.Object) this)) return false;
    if (!super.equals(o)) return false;
    // Objects.equals(a, b) is the same null-safe comparison as the generated ternaries.
    return java.util.Objects.equals(this.getSparkRunEndTime(), that.getSparkRunEndTime())
        && java.util.Objects.equals(this.getSparkRunStartTime(), that.getSparkRunStartTime())
        && java.util.Objects.equals(this.getTypeName(), that.getTypeName())
        && java.util.Objects.equals(this.getInputToAirflowTasks(), that.getInputToAirflowTasks())
        && java.util.Objects.equals(this.getInputToProcesses(), that.getInputToProcesses())
        && java.util.Objects.equals(this.getInputToSparkJobs(), that.getInputToSparkJobs())
        && java.util.Objects.equals(this.getInputs(), that.getInputs())
        && java.util.Objects.equals(this.getModelImplementedEntities(), that.getModelImplementedEntities())
        && java.util.Objects.equals(this.getOutputFromAirflowTasks(), that.getOutputFromAirflowTasks())
        && java.util.Objects.equals(this.getOutputFromProcesses(), that.getOutputFromProcesses())
        && java.util.Objects.equals(this.getOutputFromSparkJobs(), that.getOutputFromSparkJobs())
        && java.util.Objects.equals(this.getOutputs(), that.getOutputs())
        && java.util.Objects.equals(this.getProcess(), that.getProcess())
        && java.util.Objects.equals(this.getSparkAppName(), that.getSparkAppName())
        && java.util.Objects.equals(this.getSparkMaster(), that.getSparkMaster())
        && java.util.Objects.equals(this.getSparkRunOpenLineageState(), that.getSparkRunOpenLineageState())
        && java.util.Objects.equals(this.getSparkRunOpenLineageVersion(), that.getSparkRunOpenLineageVersion())
        && java.util.Objects.equals(this.getSparkRunVersion(), that.getSparkRunVersion());
}
// Lombok equals/canEqual protocol: lets subclasses keep equals symmetric by
// declaring which types they consider comparable to themselves.
@java.lang.SuppressWarnings("all")
@lombok.Generated
protected boolean canEqual(final java.lang.Object other) {
return other instanceof SparkJob;
}
/**
 * Hash code consistent with {@link #equals}: folds each SparkJob-level attribute
 * into the superclass's hash using the same constants (59, 43) and field order
 * as the generated implementation, so values are unchanged.
 */
@java.lang.Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
public int hashCode() {
    final java.lang.Object[] fields = {this.getSparkRunEndTime(), this.getSparkRunStartTime(), this.getTypeName(), this.getInputToAirflowTasks(), this.getInputToProcesses(), this.getInputToSparkJobs(), this.getInputs(), this.getModelImplementedEntities(), this.getOutputFromAirflowTasks(), this.getOutputFromProcesses(), this.getOutputFromSparkJobs(), this.getOutputs(), this.getProcess(), this.getSparkAppName(), this.getSparkMaster(), this.getSparkRunOpenLineageState(), this.getSparkRunOpenLineageVersion(), this.getSparkRunVersion()};
    int result = super.hashCode();
    for (final java.lang.Object field : fields) {
        // 43 is the generated null placeholder; keep it so hashes match the original.
        result = result * 59 + (field == null ? 43 : field.hashCode());
    }
    return result;
}
// Diagnostic representation including superclass state and every SparkJob-level attribute.
@java.lang.Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
public java.lang.String toString() {
return "SparkJob(super=" + super.toString() + ", typeName=" + this.getTypeName() + ", inputToAirflowTasks=" + this.getInputToAirflowTasks() + ", inputToProcesses=" + this.getInputToProcesses() + ", inputToSparkJobs=" + this.getInputToSparkJobs() + ", inputs=" + this.getInputs() + ", modelImplementedEntities=" + this.getModelImplementedEntities() + ", outputFromAirflowTasks=" + this.getOutputFromAirflowTasks() + ", outputFromProcesses=" + this.getOutputFromProcesses() + ", outputFromSparkJobs=" + this.getOutputFromSparkJobs() + ", outputs=" + this.getOutputs() + ", process=" + this.getProcess() + ", sparkAppName=" + this.getSparkAppName() + ", sparkMaster=" + this.getSparkMaster() + ", sparkRunEndTime=" + this.getSparkRunEndTime() + ", sparkRunOpenLineageState=" + this.getSparkRunOpenLineageState() + ", sparkRunOpenLineageVersion=" + this.getSparkRunOpenLineageVersion() + ", sparkRunStartTime=" + this.getSparkRunStartTime() + ", sparkRunVersion=" + this.getSparkRunVersion() + ")";
}
/**
 * Fixed typeName for SparkJobs.
 * @return the type name; defaults to the generated default when not set via the builder
 */
@Override
@java.lang.SuppressWarnings("all")
@lombok.Generated
public String getTypeName() {
return this.typeName;
}
}