com.google.cloud.dataproc.v1.OrderedJob Maven / Gradle / Ivy
PROTO library for proto-google-cloud-dataproc-v1
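Below is the generated source for com.google.cloud.dataproc.v1.OrderedJob. As a quick orientation, here is a minimal usage sketch; it is not part of the generated file, assumes proto-google-cloud-dataproc-v1 is on the classpath, and uses the builder API defined below (the Spark main class and step ids are hypothetical):

import com.google.cloud.dataproc.v1.OrderedJob;
import com.google.cloud.dataproc.v1.SparkJob;

public class OrderedJobExample {
  public static void main(String[] args) {
    // Build an OrderedJob whose job_type oneof is a Spark job.
    OrderedJob job =
        OrderedJob.newBuilder()
            .setStepId("spark-step-1") // 3-50 chars; letters, digits, '_', '-'
            .setSparkJob(
                SparkJob.newBuilder()
                    .setMainClass("com.example.SparkApp") // hypothetical main class
                    .build())
            .putLabels("env", "dev") // map<string, string> labels field
            .addPrerequisiteStepIds("ingest-step") // hypothetical prior step id
            .build();
    System.out.println(job.getJobTypeCase()); // prints SPARK_JOB
  }
}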
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataproc/v1/workflow_templates.proto
// Protobuf Java Version: 3.25.5
package com.google.cloud.dataproc.v1;
/**
*
*
*
* A job executed by the workflow.
*
*
* Protobuf type {@code google.cloud.dataproc.v1.OrderedJob}
*/
public final class OrderedJob extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.OrderedJob)
OrderedJobOrBuilder {
private static final long serialVersionUID = 0L;
// Use OrderedJob.newBuilder() to construct.
private OrderedJob(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private OrderedJob() {
stepId_ = "";
prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new OrderedJob();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 8:
return internalGetLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_OrderedJob_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataproc.v1.OrderedJob.class,
com.google.cloud.dataproc.v1.OrderedJob.Builder.class);
}
private int bitField0_;
private int jobTypeCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object jobType_;
public enum JobTypeCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
HADOOP_JOB(2),
SPARK_JOB(3),
PYSPARK_JOB(4),
HIVE_JOB(5),
PIG_JOB(6),
SPARK_R_JOB(11),
SPARK_SQL_JOB(7),
PRESTO_JOB(12),
TRINO_JOB(13),
FLINK_JOB(14),
JOBTYPE_NOT_SET(0);
private final int value;
private JobTypeCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static JobTypeCase valueOf(int value) {
return forNumber(value);
}
public static JobTypeCase forNumber(int value) {
switch (value) {
case 2:
return HADOOP_JOB;
case 3:
return SPARK_JOB;
case 4:
return PYSPARK_JOB;
case 5:
return HIVE_JOB;
case 6:
return PIG_JOB;
case 11:
return SPARK_R_JOB;
case 7:
return SPARK_SQL_JOB;
case 12:
return PRESTO_JOB;
case 13:
return TRINO_JOB;
case 14:
return FLINK_JOB;
case 0:
return JOBTYPE_NOT_SET;
default:
return null;
}
}
public int getNumber() {
return this.value;
}
};
public JobTypeCase getJobTypeCase() {
return JobTypeCase.forNumber(jobTypeCase_);
}
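// Illustrative (not generated): callers typically dispatch on the oneof case:
//   switch (job.getJobTypeCase()) {
//     case SPARK_JOB: handle(job.getSparkJob()); break; // handle(...) is hypothetical
//     case JOBTYPE_NOT_SET: /* no job type was set */ break;
//     default: break;
//   }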
public static final int STEP_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object stepId_ = "";
/**
*
*
*
* Required. The step id. The id must be unique among all jobs
* within the template.
*
* The step id is used as prefix for job id, as job
* `goog-dataproc-workflow-step-id` label, and in
* [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
* field from other steps.
*
* The id must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), and hyphens (-). Cannot begin or end with underscore
* or hyphen. Must consist of between 3 and 50 characters.
*
*
* string step_id = 1 [(.google.api.field_behavior) = REQUIRED];
*
* @return The stepId.
*/
@java.lang.Override
public java.lang.String getStepId() {
java.lang.Object ref = stepId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
stepId_ = s;
return s;
}
}
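// Illustrative examples (not generated): "prepare_data" and "job-1" are valid
// step ids; "ab" (fewer than 3 characters) and "-job" (leading hyphen) are not.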
/**
*
*
*
* Required. The step id. The id must be unique among all jobs
* within the template.
*
* The step id is used as prefix for job id, as job
* `goog-dataproc-workflow-step-id` label, and in
* [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
* field from other steps.
*
* The id must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), and hyphens (-). Cannot begin or end with underscore
* or hyphen. Must consist of between 3 and 50 characters.
*
*
* string step_id = 1 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for stepId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getStepIdBytes() {
java.lang.Object ref = stepId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
stepId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int HADOOP_JOB_FIELD_NUMBER = 2;
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the hadoopJob field is set.
*/
@java.lang.Override
public boolean hasHadoopJob() {
return jobTypeCase_ == 2;
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The hadoopJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() {
if (jobTypeCase_ == 2) {
return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
}
return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() {
if (jobTypeCase_ == 2) {
return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
}
return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
}
public static final int SPARK_JOB_FIELD_NUMBER = 3;
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the sparkJob field is set.
*/
@java.lang.Override
public boolean hasSparkJob() {
return jobTypeCase_ == 3;
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The sparkJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkJob getSparkJob() {
if (jobTypeCase_ == 3) {
return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() {
if (jobTypeCase_ == 3) {
return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
}
public static final int PYSPARK_JOB_FIELD_NUMBER = 4;
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the pysparkJob field is set.
*/
@java.lang.Override
public boolean hasPysparkJob() {
return jobTypeCase_ == 4;
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The pysparkJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() {
if (jobTypeCase_ == 4) {
return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() {
if (jobTypeCase_ == 4) {
return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
}
public static final int HIVE_JOB_FIELD_NUMBER = 5;
/**
*
*
*
* Optional. Job is a Hive job.
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the hiveJob field is set.
*/
@java.lang.Override
public boolean hasHiveJob() {
return jobTypeCase_ == 5;
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The hiveJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HiveJob getHiveJob() {
if (jobTypeCase_ == 5) {
return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
}
return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() {
if (jobTypeCase_ == 5) {
return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
}
return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
}
public static final int PIG_JOB_FIELD_NUMBER = 6;
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the pigJob field is set.
*/
@java.lang.Override
public boolean hasPigJob() {
return jobTypeCase_ == 6;
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The pigJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PigJob getPigJob() {
if (jobTypeCase_ == 6) {
return (com.google.cloud.dataproc.v1.PigJob) jobType_;
}
return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() {
if (jobTypeCase_ == 6) {
return (com.google.cloud.dataproc.v1.PigJob) jobType_;
}
return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
}
public static final int SPARK_R_JOB_FIELD_NUMBER = 11;
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the sparkRJob field is set.
*/
@java.lang.Override
public boolean hasSparkRJob() {
return jobTypeCase_ == 11;
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The sparkRJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkRJob getSparkRJob() {
if (jobTypeCase_ == 11) {
return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkRJobOrBuilder getSparkRJobOrBuilder() {
if (jobTypeCase_ == 11) {
return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
}
public static final int SPARK_SQL_JOB_FIELD_NUMBER = 7;
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the sparkSqlJob field is set.
*/
@java.lang.Override
public boolean hasSparkSqlJob() {
return jobTypeCase_ == 7;
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The sparkSqlJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() {
if (jobTypeCase_ == 7) {
return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() {
if (jobTypeCase_ == 7) {
return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
}
public static final int PRESTO_JOB_FIELD_NUMBER = 12;
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the prestoJob field is set.
*/
@java.lang.Override
public boolean hasPrestoJob() {
return jobTypeCase_ == 12;
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The prestoJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PrestoJob getPrestoJob() {
if (jobTypeCase_ == 12) {
return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
}
return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PrestoJobOrBuilder getPrestoJobOrBuilder() {
if (jobTypeCase_ == 12) {
return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
}
return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
}
public static final int TRINO_JOB_FIELD_NUMBER = 13;
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the trinoJob field is set.
*/
@java.lang.Override
public boolean hasTrinoJob() {
return jobTypeCase_ == 13;
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The trinoJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.TrinoJob getTrinoJob() {
if (jobTypeCase_ == 13) {
return (com.google.cloud.dataproc.v1.TrinoJob) jobType_;
}
return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.TrinoJobOrBuilder getTrinoJobOrBuilder() {
if (jobTypeCase_ == 13) {
return (com.google.cloud.dataproc.v1.TrinoJob) jobType_;
}
return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
}
public static final int FLINK_JOB_FIELD_NUMBER = 14;
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the flinkJob field is set.
*/
@java.lang.Override
public boolean hasFlinkJob() {
return jobTypeCase_ == 14;
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The flinkJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.FlinkJob getFlinkJob() {
if (jobTypeCase_ == 14) {
return (com.google.cloud.dataproc.v1.FlinkJob) jobType_;
}
return com.google.cloud.dataproc.v1.FlinkJob.getDefaultInstance();
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.FlinkJobOrBuilder getFlinkJobOrBuilder() {
if (jobTypeCase_ == 14) {
return (com.google.cloud.dataproc.v1.FlinkJob) jobType_;
}
return com.google.cloud.dataproc.v1.FlinkJob.getDefaultInstance();
}
public static final int LABELS_FIELD_NUMBER = 8;
private static final class LabelsDefaultEntryHolder {
static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
com.google.protobuf.MapEntry.newDefaultInstance(
com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_OrderedJob_LabelsEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.STRING,
"");
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
if (labels_ == null) {
return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
}
return labels_;
}
public int getLabelsCount() {
return internalGetLabels().getMap().size();
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
return getLabelsMap();
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
return internalGetLabels().getMap();
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
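// Illustrative (not generated): the key "env" with value "prod" satisfies both
// label regexes; a key starting with a digit or an uppercase letter (e.g.
// "1env" or "Env") does not match [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}.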
public static final int SCHEDULING_FIELD_NUMBER = 9;
private com.google.cloud.dataproc.v1.JobScheduling scheduling_;
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the scheduling field is set.
*/
@java.lang.Override
public boolean hasScheduling() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The scheduling.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.JobScheduling getScheduling() {
return scheduling_ == null
? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
: scheduling_;
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() {
return scheduling_ == null
? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
: scheduling_;
}
public static final int PREREQUISITE_STEP_IDS_FIELD_NUMBER = 10;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList prerequisiteStepIds_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return A list containing the prerequisiteStepIds.
*/
public com.google.protobuf.ProtocolStringList getPrerequisiteStepIdsList() {
return prerequisiteStepIds_;
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The count of prerequisiteStepIds.
*/
public int getPrerequisiteStepIdsCount() {
return prerequisiteStepIds_.size();
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param index The index of the element to return.
* @return The prerequisiteStepIds at the given index.
*/
public java.lang.String getPrerequisiteStepIds(int index) {
return prerequisiteStepIds_.get(index);
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param index The index of the value to return.
* @return The bytes of the prerequisiteStepIds at the given index.
*/
public com.google.protobuf.ByteString getPrerequisiteStepIdsBytes(int index) {
return prerequisiteStepIds_.getByteString(index);
}
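// Illustrative (not generated): a step whose prerequisite_step_ids is
// ["ingest", "clean"] runs only after both of those steps complete; with an
// empty list it starts at the beginning of the workflow.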
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stepId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, stepId_);
}
if (jobTypeCase_ == 2) {
output.writeMessage(2, (com.google.cloud.dataproc.v1.HadoopJob) jobType_);
}
if (jobTypeCase_ == 3) {
output.writeMessage(3, (com.google.cloud.dataproc.v1.SparkJob) jobType_);
}
if (jobTypeCase_ == 4) {
output.writeMessage(4, (com.google.cloud.dataproc.v1.PySparkJob) jobType_);
}
if (jobTypeCase_ == 5) {
output.writeMessage(5, (com.google.cloud.dataproc.v1.HiveJob) jobType_);
}
if (jobTypeCase_ == 6) {
output.writeMessage(6, (com.google.cloud.dataproc.v1.PigJob) jobType_);
}
if (jobTypeCase_ == 7) {
output.writeMessage(7, (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_);
}
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 8);
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(9, getScheduling());
}
for (int i = 0; i < prerequisiteStepIds_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(
output, 10, prerequisiteStepIds_.getRaw(i));
}
if (jobTypeCase_ == 11) {
output.writeMessage(11, (com.google.cloud.dataproc.v1.SparkRJob) jobType_);
}
if (jobTypeCase_ == 12) {
output.writeMessage(12, (com.google.cloud.dataproc.v1.PrestoJob) jobType_);
}
if (jobTypeCase_ == 13) {
output.writeMessage(13, (com.google.cloud.dataproc.v1.TrinoJob) jobType_);
}
if (jobTypeCase_ == 14) {
output.writeMessage(14, (com.google.cloud.dataproc.v1.FlinkJob) jobType_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stepId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, stepId_);
}
if (jobTypeCase_ == 2) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
2, (com.google.cloud.dataproc.v1.HadoopJob) jobType_);
}
if (jobTypeCase_ == 3) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
3, (com.google.cloud.dataproc.v1.SparkJob) jobType_);
}
if (jobTypeCase_ == 4) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
4, (com.google.cloud.dataproc.v1.PySparkJob) jobType_);
}
if (jobTypeCase_ == 5) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
5, (com.google.cloud.dataproc.v1.HiveJob) jobType_);
}
if (jobTypeCase_ == 6) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
6, (com.google.cloud.dataproc.v1.PigJob) jobType_);
}
if (jobTypeCase_ == 7) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
7, (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_);
}
for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
internalGetLabels().getMap().entrySet()) {
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
LabelsDefaultEntryHolder.defaultEntry
.newBuilderForType()
.setKey(entry.getKey())
.setValue(entry.getValue())
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, labels__);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, getScheduling());
}
{
int dataSize = 0;
for (int i = 0; i < prerequisiteStepIds_.size(); i++) {
dataSize += computeStringSizeNoTag(prerequisiteStepIds_.getRaw(i));
}
size += dataSize;
size += 1 * getPrerequisiteStepIdsList().size();
}
if (jobTypeCase_ == 11) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
11, (com.google.cloud.dataproc.v1.SparkRJob) jobType_);
}
if (jobTypeCase_ == 12) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
12, (com.google.cloud.dataproc.v1.PrestoJob) jobType_);
}
if (jobTypeCase_ == 13) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
13, (com.google.cloud.dataproc.v1.TrinoJob) jobType_);
}
if (jobTypeCase_ == 14) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
14, (com.google.cloud.dataproc.v1.FlinkJob) jobType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataproc.v1.OrderedJob)) {
return super.equals(obj);
}
com.google.cloud.dataproc.v1.OrderedJob other = (com.google.cloud.dataproc.v1.OrderedJob) obj;
if (!getStepId().equals(other.getStepId())) return false;
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
if (hasScheduling() != other.hasScheduling()) return false;
if (hasScheduling()) {
if (!getScheduling().equals(other.getScheduling())) return false;
}
if (!getPrerequisiteStepIdsList().equals(other.getPrerequisiteStepIdsList())) return false;
if (!getJobTypeCase().equals(other.getJobTypeCase())) return false;
switch (jobTypeCase_) {
case 2:
if (!getHadoopJob().equals(other.getHadoopJob())) return false;
break;
case 3:
if (!getSparkJob().equals(other.getSparkJob())) return false;
break;
case 4:
if (!getPysparkJob().equals(other.getPysparkJob())) return false;
break;
case 5:
if (!getHiveJob().equals(other.getHiveJob())) return false;
break;
case 6:
if (!getPigJob().equals(other.getPigJob())) return false;
break;
case 11:
if (!getSparkRJob().equals(other.getSparkRJob())) return false;
break;
case 7:
if (!getSparkSqlJob().equals(other.getSparkSqlJob())) return false;
break;
case 12:
if (!getPrestoJob().equals(other.getPrestoJob())) return false;
break;
case 13:
if (!getTrinoJob().equals(other.getTrinoJob())) return false;
break;
case 14:
if (!getFlinkJob().equals(other.getFlinkJob())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + STEP_ID_FIELD_NUMBER;
hash = (53 * hash) + getStepId().hashCode();
if (!internalGetLabels().getMap().isEmpty()) {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
if (hasScheduling()) {
hash = (37 * hash) + SCHEDULING_FIELD_NUMBER;
hash = (53 * hash) + getScheduling().hashCode();
}
if (getPrerequisiteStepIdsCount() > 0) {
hash = (37 * hash) + PREREQUISITE_STEP_IDS_FIELD_NUMBER;
hash = (53 * hash) + getPrerequisiteStepIdsList().hashCode();
}
switch (jobTypeCase_) {
case 2:
hash = (37 * hash) + HADOOP_JOB_FIELD_NUMBER;
hash = (53 * hash) + getHadoopJob().hashCode();
break;
case 3:
hash = (37 * hash) + SPARK_JOB_FIELD_NUMBER;
hash = (53 * hash) + getSparkJob().hashCode();
break;
case 4:
hash = (37 * hash) + PYSPARK_JOB_FIELD_NUMBER;
hash = (53 * hash) + getPysparkJob().hashCode();
break;
case 5:
hash = (37 * hash) + HIVE_JOB_FIELD_NUMBER;
hash = (53 * hash) + getHiveJob().hashCode();
break;
case 6:
hash = (37 * hash) + PIG_JOB_FIELD_NUMBER;
hash = (53 * hash) + getPigJob().hashCode();
break;
case 11:
hash = (37 * hash) + SPARK_R_JOB_FIELD_NUMBER;
hash = (53 * hash) + getSparkRJob().hashCode();
break;
case 7:
hash = (37 * hash) + SPARK_SQL_JOB_FIELD_NUMBER;
hash = (53 * hash) + getSparkSqlJob().hashCode();
break;
case 12:
hash = (37 * hash) + PRESTO_JOB_FIELD_NUMBER;
hash = (53 * hash) + getPrestoJob().hashCode();
break;
case 13:
hash = (37 * hash) + TRINO_JOB_FIELD_NUMBER;
hash = (53 * hash) + getTrinoJob().hashCode();
break;
case 14:
hash = (37 * hash) + FLINK_JOB_FIELD_NUMBER;
hash = (53 * hash) + getFlinkJob().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
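// Illustrative round trip (not generated code):
//   byte[] bytes = job.toByteArray();          // inherited serialization method
//   OrderedJob copy = OrderedJob.parseFrom(bytes);
//   // copy.equals(job) holds after a successful round trip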
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dataproc.v1.OrderedJob prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
*
* A job executed by the workflow.
*
*
* Protobuf type {@code google.cloud.dataproc.v1.OrderedJob}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.OrderedJob)
com.google.cloud.dataproc.v1.OrderedJobOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor;
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 8:
return internalGetLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
int number) {
switch (number) {
case 8:
return internalGetMutableLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_OrderedJob_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataproc.v1.OrderedJob.class,
com.google.cloud.dataproc.v1.OrderedJob.Builder.class);
}
// Construct using com.google.cloud.dataproc.v1.OrderedJob.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSchedulingFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
stepId_ = "";
if (hadoopJobBuilder_ != null) {
hadoopJobBuilder_.clear();
}
if (sparkJobBuilder_ != null) {
sparkJobBuilder_.clear();
}
if (pysparkJobBuilder_ != null) {
pysparkJobBuilder_.clear();
}
if (hiveJobBuilder_ != null) {
hiveJobBuilder_.clear();
}
if (pigJobBuilder_ != null) {
pigJobBuilder_.clear();
}
if (sparkRJobBuilder_ != null) {
sparkRJobBuilder_.clear();
}
if (sparkSqlJobBuilder_ != null) {
sparkSqlJobBuilder_.clear();
}
if (prestoJobBuilder_ != null) {
prestoJobBuilder_.clear();
}
if (trinoJobBuilder_ != null) {
trinoJobBuilder_.clear();
}
if (flinkJobBuilder_ != null) {
flinkJobBuilder_.clear();
}
internalGetMutableLabels().clear();
scheduling_ = null;
if (schedulingBuilder_ != null) {
schedulingBuilder_.dispose();
schedulingBuilder_ = null;
}
prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
jobTypeCase_ = 0;
jobType_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor;
}
@java.lang.Override
public com.google.cloud.dataproc.v1.OrderedJob getDefaultInstanceForType() {
return com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataproc.v1.OrderedJob build() {
com.google.cloud.dataproc.v1.OrderedJob result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataproc.v1.OrderedJob buildPartial() {
com.google.cloud.dataproc.v1.OrderedJob result =
new com.google.cloud.dataproc.v1.OrderedJob(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dataproc.v1.OrderedJob result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.stepId_ = stepId_;
}
if (((from_bitField0_ & 0x00000800) != 0)) {
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00001000) != 0)) {
result.scheduling_ = schedulingBuilder_ == null ? scheduling_ : schedulingBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00002000) != 0)) {
prerequisiteStepIds_.makeImmutable();
result.prerequisiteStepIds_ = prerequisiteStepIds_;
}
result.bitField0_ |= to_bitField0_;
}
private void buildPartialOneofs(com.google.cloud.dataproc.v1.OrderedJob result) {
result.jobTypeCase_ = jobTypeCase_;
result.jobType_ = this.jobType_;
if (jobTypeCase_ == 2 && hadoopJobBuilder_ != null) {
result.jobType_ = hadoopJobBuilder_.build();
}
if (jobTypeCase_ == 3 && sparkJobBuilder_ != null) {
result.jobType_ = sparkJobBuilder_.build();
}
if (jobTypeCase_ == 4 && pysparkJobBuilder_ != null) {
result.jobType_ = pysparkJobBuilder_.build();
}
if (jobTypeCase_ == 5 && hiveJobBuilder_ != null) {
result.jobType_ = hiveJobBuilder_.build();
}
if (jobTypeCase_ == 6 && pigJobBuilder_ != null) {
result.jobType_ = pigJobBuilder_.build();
}
if (jobTypeCase_ == 11 && sparkRJobBuilder_ != null) {
result.jobType_ = sparkRJobBuilder_.build();
}
if (jobTypeCase_ == 7 && sparkSqlJobBuilder_ != null) {
result.jobType_ = sparkSqlJobBuilder_.build();
}
if (jobTypeCase_ == 12 && prestoJobBuilder_ != null) {
result.jobType_ = prestoJobBuilder_.build();
}
if (jobTypeCase_ == 13 && trinoJobBuilder_ != null) {
result.jobType_ = trinoJobBuilder_.build();
}
if (jobTypeCase_ == 14 && flinkJobBuilder_ != null) {
result.jobType_ = flinkJobBuilder_.build();
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataproc.v1.OrderedJob) {
return mergeFrom((com.google.cloud.dataproc.v1.OrderedJob) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataproc.v1.OrderedJob other) {
if (other == com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance()) return this;
if (!other.getStepId().isEmpty()) {
stepId_ = other.stepId_;
bitField0_ |= 0x00000001;
onChanged();
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
bitField0_ |= 0x00000800;
if (other.hasScheduling()) {
mergeScheduling(other.getScheduling());
}
if (!other.prerequisiteStepIds_.isEmpty()) {
if (prerequisiteStepIds_.isEmpty()) {
prerequisiteStepIds_ = other.prerequisiteStepIds_;
bitField0_ |= 0x00002000;
} else {
ensurePrerequisiteStepIdsIsMutable();
prerequisiteStepIds_.addAll(other.prerequisiteStepIds_);
}
onChanged();
}
switch (other.getJobTypeCase()) {
case HADOOP_JOB:
{
mergeHadoopJob(other.getHadoopJob());
break;
}
case SPARK_JOB:
{
mergeSparkJob(other.getSparkJob());
break;
}
case PYSPARK_JOB:
{
mergePysparkJob(other.getPysparkJob());
break;
}
case HIVE_JOB:
{
mergeHiveJob(other.getHiveJob());
break;
}
case PIG_JOB:
{
mergePigJob(other.getPigJob());
break;
}
case SPARK_R_JOB:
{
mergeSparkRJob(other.getSparkRJob());
break;
}
case SPARK_SQL_JOB:
{
mergeSparkSqlJob(other.getSparkSqlJob());
break;
}
case PRESTO_JOB:
{
mergePrestoJob(other.getPrestoJob());
break;
}
case TRINO_JOB:
{
mergeTrinoJob(other.getTrinoJob());
break;
}
case FLINK_JOB:
{
mergeFlinkJob(other.getFlinkJob());
break;
}
case JOBTYPE_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
stepId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getHadoopJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 2;
break;
} // case 18
case 26:
{
input.readMessage(getSparkJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 3;
break;
} // case 26
case 34:
{
input.readMessage(getPysparkJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 4;
break;
} // case 34
case 42:
{
input.readMessage(getHiveJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 5;
break;
} // case 42
case 50:
{
input.readMessage(getPigJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 6;
break;
} // case 50
case 58:
{
input.readMessage(getSparkSqlJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 7;
break;
} // case 58
case 66:
{
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
input.readMessage(
LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
extensionRegistry);
internalGetMutableLabels()
.getMutableMap()
.put(labels__.getKey(), labels__.getValue());
bitField0_ |= 0x00000800;
break;
} // case 66
case 74:
{
input.readMessage(getSchedulingFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00001000;
break;
} // case 74
case 82:
{
java.lang.String s = input.readStringRequireUtf8();
ensurePrerequisiteStepIdsIsMutable();
prerequisiteStepIds_.add(s);
break;
} // case 82
case 90:
{
input.readMessage(getSparkRJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 11;
break;
} // case 90
case 98:
{
input.readMessage(getPrestoJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 12;
break;
} // case 98
case 106:
{
input.readMessage(getTrinoJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 13;
break;
} // case 106
case 114:
{
input.readMessage(getFlinkJobFieldBuilder().getBuilder(), extensionRegistry);
jobTypeCase_ = 14;
break;
} // case 114
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int jobTypeCase_ = 0;
private java.lang.Object jobType_;
public JobTypeCase getJobTypeCase() {
return JobTypeCase.forNumber(jobTypeCase_);
}
public Builder clearJobType() {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
return this;
}
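// Note (illustrative, not generated): job_type is a oneof, so setting any job
// type (e.g. setSparkJob) implicitly replaces a previously set type;
// clearJobType() returns the oneof to JOBTYPE_NOT_SET.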
private int bitField0_;
private java.lang.Object stepId_ = "";
/**
*
*
*
* Required. The step id. The id must be unique among all jobs
* within the template.
*
* The step id is used as prefix for job id, as job
* `goog-dataproc-workflow-step-id` label, and in
* [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
* field from other steps.
*
* The id must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), and hyphens (-). Cannot begin or end with underscore
* or hyphen. Must consist of between 3 and 50 characters.
*
*
* string step_id = 1 [(.google.api.field_behavior) = REQUIRED];
*
* @return The stepId.
*/
public java.lang.String getStepId() {
java.lang.Object ref = stepId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
stepId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
*
* Required. The step id. The id must be unique among all jobs
* within the template.
*
* The step id is used as prefix for job id, as job
* `goog-dataproc-workflow-step-id` label, and in
* [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
* field from other steps.
*
* The id must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), and hyphens (-). Cannot begin or end with underscore
* or hyphen. Must consist of between 3 and 50 characters.
*
*
* string step_id = 1 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for stepId.
*/
public com.google.protobuf.ByteString getStepIdBytes() {
java.lang.Object ref = stepId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
stepId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
*
* Required. The step id. The id must be unique among all jobs
* within the template.
*
* The step id is used as prefix for job id, as job
* `goog-dataproc-workflow-step-id` label, and in
* [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
* field from other steps.
*
* The id must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), and hyphens (-). Cannot begin or end with underscore
* or hyphen. Must consist of between 3 and 50 characters.
*
*
* string step_id = 1 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The stepId to set.
* @return This builder for chaining.
*/
public Builder setStepId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
stepId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
*
* Required. The step id. The id must be unique among all jobs
* within the template.
*
* The step id is used as prefix for job id, as job
* `goog-dataproc-workflow-step-id` label, and in
* [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
* field from other steps.
*
* The id must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), and hyphens (-). Cannot begin or end with underscore
* or hyphen. Must consist of between 3 and 50 characters.
*
*
* string step_id = 1 [(.google.api.field_behavior) = REQUIRED];
*
* @return This builder for chaining.
*/
public Builder clearStepId() {
stepId_ = getDefaultInstance().getStepId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
*
* Required. The step id. The id must be unique among all jobs
* within the template.
*
* The step id is used as prefix for job id, as job
* `goog-dataproc-workflow-step-id` label, and in
* [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
* field from other steps.
*
* The id must contain only letters (a-z, A-Z), numbers (0-9),
* underscores (_), and hyphens (-). Cannot begin or end with underscore
* or hyphen. Must consist of between 3 and 50 characters.
*
*
* string step_id = 1 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The bytes for stepId to set.
* @return This builder for chaining.
*/
public Builder setStepIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
stepId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.HadoopJob,
com.google.cloud.dataproc.v1.HadoopJob.Builder,
com.google.cloud.dataproc.v1.HadoopJobOrBuilder>
hadoopJobBuilder_;
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the hadoopJob field is set.
*/
@java.lang.Override
public boolean hasHadoopJob() {
return jobTypeCase_ == 2;
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The hadoopJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() {
if (hadoopJobBuilder_ == null) {
if (jobTypeCase_ == 2) {
return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
}
return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 2) {
return hadoopJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) {
if (hadoopJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
hadoopJobBuilder_.setMessage(value);
}
jobTypeCase_ = 2;
return this;
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob.Builder builderForValue) {
if (hadoopJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
hadoopJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 2;
return this;
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) {
if (hadoopJobBuilder_ == null) {
if (jobTypeCase_ == 2
&& jobType_ != com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.HadoopJob.newBuilder(
(com.google.cloud.dataproc.v1.HadoopJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 2) {
hadoopJobBuilder_.mergeFrom(value);
} else {
hadoopJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 2;
return this;
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearHadoopJob() {
if (hadoopJobBuilder_ == null) {
if (jobTypeCase_ == 2) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 2) {
jobTypeCase_ = 0;
jobType_ = null;
}
hadoopJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.HadoopJob.Builder getHadoopJobBuilder() {
return getHadoopJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() {
if ((jobTypeCase_ == 2) && (hadoopJobBuilder_ != null)) {
return hadoopJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 2) {
return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
}
return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Hadoop job.
*
*
*
* .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.HadoopJob,
com.google.cloud.dataproc.v1.HadoopJob.Builder,
com.google.cloud.dataproc.v1.HadoopJobOrBuilder>
getHadoopJobFieldBuilder() {
if (hadoopJobBuilder_ == null) {
if (!(jobTypeCase_ == 2)) {
jobType_ = com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
}
hadoopJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.HadoopJob,
com.google.cloud.dataproc.v1.HadoopJob.Builder,
com.google.cloud.dataproc.v1.HadoopJobOrBuilder>(
(com.google.cloud.dataproc.v1.HadoopJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 2;
onChanged();
return hadoopJobBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkJob,
com.google.cloud.dataproc.v1.SparkJob.Builder,
com.google.cloud.dataproc.v1.SparkJobOrBuilder>
sparkJobBuilder_;
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the sparkJob field is set.
*/
@java.lang.Override
public boolean hasSparkJob() {
return jobTypeCase_ == 3;
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The sparkJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkJob getSparkJob() {
if (sparkJobBuilder_ == null) {
if (jobTypeCase_ == 3) {
return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 3) {
return sparkJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setSparkJob(com.google.cloud.dataproc.v1.SparkJob value) {
if (sparkJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
sparkJobBuilder_.setMessage(value);
}
jobTypeCase_ = 3;
return this;
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setSparkJob(com.google.cloud.dataproc.v1.SparkJob.Builder builderForValue) {
if (sparkJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
sparkJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 3;
return this;
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeSparkJob(com.google.cloud.dataproc.v1.SparkJob value) {
if (sparkJobBuilder_ == null) {
if (jobTypeCase_ == 3
&& jobType_ != com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.SparkJob.newBuilder(
(com.google.cloud.dataproc.v1.SparkJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 3) {
sparkJobBuilder_.mergeFrom(value);
} else {
sparkJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 3;
return this;
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearSparkJob() {
if (sparkJobBuilder_ == null) {
if (jobTypeCase_ == 3) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 3) {
jobTypeCase_ = 0;
jobType_ = null;
}
sparkJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.SparkJob.Builder getSparkJobBuilder() {
return getSparkJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() {
if ((jobTypeCase_ == 3) && (sparkJobBuilder_ != null)) {
return sparkJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 3) {
return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Spark job.
*
*
*
* .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkJob,
com.google.cloud.dataproc.v1.SparkJob.Builder,
com.google.cloud.dataproc.v1.SparkJobOrBuilder>
getSparkJobFieldBuilder() {
if (sparkJobBuilder_ == null) {
if (!(jobTypeCase_ == 3)) {
jobType_ = com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
}
sparkJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkJob,
com.google.cloud.dataproc.v1.SparkJob.Builder,
com.google.cloud.dataproc.v1.SparkJobOrBuilder>(
(com.google.cloud.dataproc.v1.SparkJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 3;
onChanged();
return sparkJobBuilder_;
}
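// Usage sketch: the job type fields share a single oneof, so selecting
// spark_job discards any previously selected case. SparkJob.setMainClass is
// assumed from the dataproc v1 SparkJob message.
//
//   OrderedJob.Builder step =
//       OrderedJob.newBuilder()
//           .setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance());
//   step.setSparkJob(
//       com.google.cloud.dataproc.v1.SparkJob.newBuilder()
//           .setMainClass("com.example.SparkDriver")
//           .build());
//   assert !step.hasHadoopJob(); // replaced by the spark_job case
//   assert step.hasSparkJob();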
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PySparkJob,
com.google.cloud.dataproc.v1.PySparkJob.Builder,
com.google.cloud.dataproc.v1.PySparkJobOrBuilder>
pysparkJobBuilder_;
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the pysparkJob field is set.
*/
@java.lang.Override
public boolean hasPysparkJob() {
return jobTypeCase_ == 4;
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The pysparkJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() {
if (pysparkJobBuilder_ == null) {
if (jobTypeCase_ == 4) {
return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 4) {
return pysparkJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setPysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) {
if (pysparkJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
pysparkJobBuilder_.setMessage(value);
}
jobTypeCase_ = 4;
return this;
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setPysparkJob(com.google.cloud.dataproc.v1.PySparkJob.Builder builderForValue) {
if (pysparkJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
pysparkJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 4;
return this;
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergePysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) {
if (pysparkJobBuilder_ == null) {
if (jobTypeCase_ == 4
&& jobType_ != com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.PySparkJob.newBuilder(
(com.google.cloud.dataproc.v1.PySparkJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 4) {
pysparkJobBuilder_.mergeFrom(value);
} else {
pysparkJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 4;
return this;
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearPysparkJob() {
if (pysparkJobBuilder_ == null) {
if (jobTypeCase_ == 4) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 4) {
jobTypeCase_ = 0;
jobType_ = null;
}
pysparkJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.PySparkJob.Builder getPysparkJobBuilder() {
return getPysparkJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() {
if ((jobTypeCase_ == 4) && (pysparkJobBuilder_ != null)) {
return pysparkJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 4) {
return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
}
return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a PySpark job.
*
*
*
* .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PySparkJob,
com.google.cloud.dataproc.v1.PySparkJob.Builder,
com.google.cloud.dataproc.v1.PySparkJobOrBuilder>
getPysparkJobFieldBuilder() {
if (pysparkJobBuilder_ == null) {
if (!(jobTypeCase_ == 4)) {
jobType_ = com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
}
pysparkJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PySparkJob,
com.google.cloud.dataproc.v1.PySparkJob.Builder,
com.google.cloud.dataproc.v1.PySparkJobOrBuilder>(
(com.google.cloud.dataproc.v1.PySparkJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 4;
onChanged();
return pysparkJobBuilder_;
}
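// Usage sketch: getPysparkJobBuilder() lazily selects the pyspark_job case
// (the field-builder getter above flips jobTypeCase_ even on a read) and
// returns a nested builder for in-place edits. setMainPythonFileUri is
// assumed from the dataproc v1 PySparkJob message.
//
//   OrderedJob.Builder step = OrderedJob.newBuilder().setStepId("score");
//   step.getPysparkJobBuilder()
//       .setMainPythonFileUri("gs://example-bucket/score.py");
//   assert step.hasPysparkJob(); // no explicit setPysparkJob call needed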
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.HiveJob,
com.google.cloud.dataproc.v1.HiveJob.Builder,
com.google.cloud.dataproc.v1.HiveJobOrBuilder>
hiveJobBuilder_;
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the hiveJob field is set.
*/
@java.lang.Override
public boolean hasHiveJob() {
return jobTypeCase_ == 5;
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The hiveJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HiveJob getHiveJob() {
if (hiveJobBuilder_ == null) {
if (jobTypeCase_ == 5) {
return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
}
return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 5) {
return hiveJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setHiveJob(com.google.cloud.dataproc.v1.HiveJob value) {
if (hiveJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
hiveJobBuilder_.setMessage(value);
}
jobTypeCase_ = 5;
return this;
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setHiveJob(com.google.cloud.dataproc.v1.HiveJob.Builder builderForValue) {
if (hiveJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
hiveJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 5;
return this;
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeHiveJob(com.google.cloud.dataproc.v1.HiveJob value) {
if (hiveJobBuilder_ == null) {
if (jobTypeCase_ == 5
&& jobType_ != com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.HiveJob.newBuilder(
(com.google.cloud.dataproc.v1.HiveJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 5) {
hiveJobBuilder_.mergeFrom(value);
} else {
hiveJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 5;
return this;
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearHiveJob() {
if (hiveJobBuilder_ == null) {
if (jobTypeCase_ == 5) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 5) {
jobTypeCase_ = 0;
jobType_ = null;
}
hiveJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.HiveJob.Builder getHiveJobBuilder() {
return getHiveJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() {
if ((jobTypeCase_ == 5) && (hiveJobBuilder_ != null)) {
return hiveJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 5) {
return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
}
return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Hive job.
*
*
*
* .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.HiveJob,
com.google.cloud.dataproc.v1.HiveJob.Builder,
com.google.cloud.dataproc.v1.HiveJobOrBuilder>
getHiveJobFieldBuilder() {
if (hiveJobBuilder_ == null) {
if (!(jobTypeCase_ == 5)) {
jobType_ = com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
}
hiveJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.HiveJob,
com.google.cloud.dataproc.v1.HiveJob.Builder,
com.google.cloud.dataproc.v1.HiveJobOrBuilder>(
(com.google.cloud.dataproc.v1.HiveJob) jobType_, getParentForChildren(), isClean());
jobType_ = null;
}
jobTypeCase_ = 5;
onChanged();
return hiveJobBuilder_;
}
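// Usage sketch: a Hive step with inline queries. HiveJob's queries oneof
// (setQueryFileUri / setQueryList) and the QueryList message are assumed from
// the dataproc v1 proto.
//
//   OrderedJob hiveStep =
//       OrderedJob.newBuilder()
//           .setStepId("report")
//           .setHiveJob(
//               com.google.cloud.dataproc.v1.HiveJob.newBuilder()
//                   .setQueryList(
//                       com.google.cloud.dataproc.v1.QueryList.newBuilder()
//                           .addQueries("SELECT COUNT(*) FROM events"))
//                   .build())
//           .build();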
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PigJob,
com.google.cloud.dataproc.v1.PigJob.Builder,
com.google.cloud.dataproc.v1.PigJobOrBuilder>
pigJobBuilder_;
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the pigJob field is set.
*/
@java.lang.Override
public boolean hasPigJob() {
return jobTypeCase_ == 6;
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The pigJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PigJob getPigJob() {
if (pigJobBuilder_ == null) {
if (jobTypeCase_ == 6) {
return (com.google.cloud.dataproc.v1.PigJob) jobType_;
}
return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 6) {
return pigJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setPigJob(com.google.cloud.dataproc.v1.PigJob value) {
if (pigJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
pigJobBuilder_.setMessage(value);
}
jobTypeCase_ = 6;
return this;
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setPigJob(com.google.cloud.dataproc.v1.PigJob.Builder builderForValue) {
if (pigJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
pigJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 6;
return this;
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergePigJob(com.google.cloud.dataproc.v1.PigJob value) {
if (pigJobBuilder_ == null) {
if (jobTypeCase_ == 6
&& jobType_ != com.google.cloud.dataproc.v1.PigJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.PigJob.newBuilder(
(com.google.cloud.dataproc.v1.PigJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 6) {
pigJobBuilder_.mergeFrom(value);
} else {
pigJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 6;
return this;
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearPigJob() {
if (pigJobBuilder_ == null) {
if (jobTypeCase_ == 6) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 6) {
jobTypeCase_ = 0;
jobType_ = null;
}
pigJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.PigJob.Builder getPigJobBuilder() {
return getPigJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() {
if ((jobTypeCase_ == 6) && (pigJobBuilder_ != null)) {
return pigJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 6) {
return (com.google.cloud.dataproc.v1.PigJob) jobType_;
}
return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Pig job.
*
*
* .google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PigJob,
com.google.cloud.dataproc.v1.PigJob.Builder,
com.google.cloud.dataproc.v1.PigJobOrBuilder>
getPigJobFieldBuilder() {
if (pigJobBuilder_ == null) {
if (!(jobTypeCase_ == 6)) {
jobType_ = com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
}
pigJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PigJob,
com.google.cloud.dataproc.v1.PigJob.Builder,
com.google.cloud.dataproc.v1.PigJobOrBuilder>(
(com.google.cloud.dataproc.v1.PigJob) jobType_, getParentForChildren(), isClean());
jobType_ = null;
}
jobTypeCase_ = 6;
onChanged();
return pigJobBuilder_;
}
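// Usage sketch: clearPigJob resets the oneof to JOBTYPE_NOT_SET, after which
// the getter falls back to the default instance.
//
//   OrderedJob.Builder step =
//       OrderedJob.newBuilder()
//           .setPigJob(com.google.cloud.dataproc.v1.PigJob.getDefaultInstance());
//   step.clearPigJob();
//   assert !step.hasPigJob();
//   assert step.getPigJob()
//       .equals(com.google.cloud.dataproc.v1.PigJob.getDefaultInstance());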
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkRJob,
com.google.cloud.dataproc.v1.SparkRJob.Builder,
com.google.cloud.dataproc.v1.SparkRJobOrBuilder>
sparkRJobBuilder_;
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the sparkRJob field is set.
*/
@java.lang.Override
public boolean hasSparkRJob() {
return jobTypeCase_ == 11;
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The sparkRJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkRJob getSparkRJob() {
if (sparkRJobBuilder_ == null) {
if (jobTypeCase_ == 11) {
return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 11) {
return sparkRJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setSparkRJob(com.google.cloud.dataproc.v1.SparkRJob value) {
if (sparkRJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
sparkRJobBuilder_.setMessage(value);
}
jobTypeCase_ = 11;
return this;
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setSparkRJob(com.google.cloud.dataproc.v1.SparkRJob.Builder builderForValue) {
if (sparkRJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
sparkRJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 11;
return this;
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeSparkRJob(com.google.cloud.dataproc.v1.SparkRJob value) {
if (sparkRJobBuilder_ == null) {
if (jobTypeCase_ == 11
&& jobType_ != com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.SparkRJob.newBuilder(
(com.google.cloud.dataproc.v1.SparkRJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 11) {
sparkRJobBuilder_.mergeFrom(value);
} else {
sparkRJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 11;
return this;
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearSparkRJob() {
if (sparkRJobBuilder_ == null) {
if (jobTypeCase_ == 11) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 11) {
jobTypeCase_ = 0;
jobType_ = null;
}
sparkRJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.SparkRJob.Builder getSparkRJobBuilder() {
return getSparkRJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkRJobOrBuilder getSparkRJobOrBuilder() {
if ((jobTypeCase_ == 11) && (sparkRJobBuilder_ != null)) {
return sparkRJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 11) {
return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a SparkR job.
*
*
*
* .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkRJob,
com.google.cloud.dataproc.v1.SparkRJob.Builder,
com.google.cloud.dataproc.v1.SparkRJobOrBuilder>
getSparkRJobFieldBuilder() {
if (sparkRJobBuilder_ == null) {
if (!(jobTypeCase_ == 11)) {
jobType_ = com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
}
sparkRJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkRJob,
com.google.cloud.dataproc.v1.SparkRJob.Builder,
com.google.cloud.dataproc.v1.SparkRJobOrBuilder>(
(com.google.cloud.dataproc.v1.SparkRJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 11;
onChanged();
return sparkRJobBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkSqlJob,
com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>
sparkSqlJobBuilder_;
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the sparkSqlJob field is set.
*/
@java.lang.Override
public boolean hasSparkSqlJob() {
return jobTypeCase_ == 7;
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The sparkSqlJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() {
if (sparkSqlJobBuilder_ == null) {
if (jobTypeCase_ == 7) {
return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 7) {
return sparkSqlJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) {
if (sparkSqlJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
sparkSqlJobBuilder_.setMessage(value);
}
jobTypeCase_ = 7;
return this;
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setSparkSqlJob(
com.google.cloud.dataproc.v1.SparkSqlJob.Builder builderForValue) {
if (sparkSqlJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
sparkSqlJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 7;
return this;
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) {
if (sparkSqlJobBuilder_ == null) {
if (jobTypeCase_ == 7
&& jobType_ != com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.SparkSqlJob.newBuilder(
(com.google.cloud.dataproc.v1.SparkSqlJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 7) {
sparkSqlJobBuilder_.mergeFrom(value);
} else {
sparkSqlJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 7;
return this;
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearSparkSqlJob() {
if (sparkSqlJobBuilder_ == null) {
if (jobTypeCase_ == 7) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 7) {
jobTypeCase_ = 0;
jobType_ = null;
}
sparkSqlJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.SparkSqlJob.Builder getSparkSqlJobBuilder() {
return getSparkSqlJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() {
if ((jobTypeCase_ == 7) && (sparkSqlJobBuilder_ != null)) {
return sparkSqlJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 7) {
return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
}
return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a SparkSql job.
*
*
*
* .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkSqlJob,
com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>
getSparkSqlJobFieldBuilder() {
if (sparkSqlJobBuilder_ == null) {
if (!(jobTypeCase_ == 7)) {
jobType_ = com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
}
sparkSqlJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.SparkSqlJob,
com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>(
(com.google.cloud.dataproc.v1.SparkSqlJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 7;
onChanged();
return sparkSqlJobBuilder_;
}
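// Usage sketch: a SparkSql step with a parameterized query. setQueryFileUri
// and putScriptVariables are assumed from the dataproc v1 SparkSqlJob
// message; the URI is hypothetical.
//
//   OrderedJob sqlStep =
//       OrderedJob.newBuilder()
//           .setStepId("aggregate")
//           .setSparkSqlJob(
//               com.google.cloud.dataproc.v1.SparkSqlJob.newBuilder()
//                   .setQueryFileUri("gs://example-bucket/aggregate.sql")
//                   .putScriptVariables("run_date", "2024-01-01")
//                   .build())
//           .build();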
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PrestoJob,
com.google.cloud.dataproc.v1.PrestoJob.Builder,
com.google.cloud.dataproc.v1.PrestoJobOrBuilder>
prestoJobBuilder_;
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the prestoJob field is set.
*/
@java.lang.Override
public boolean hasPrestoJob() {
return jobTypeCase_ == 12;
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The prestoJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PrestoJob getPrestoJob() {
if (prestoJobBuilder_ == null) {
if (jobTypeCase_ == 12) {
return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
}
return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 12) {
return prestoJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setPrestoJob(com.google.cloud.dataproc.v1.PrestoJob value) {
if (prestoJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
prestoJobBuilder_.setMessage(value);
}
jobTypeCase_ = 12;
return this;
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setPrestoJob(com.google.cloud.dataproc.v1.PrestoJob.Builder builderForValue) {
if (prestoJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
prestoJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 12;
return this;
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergePrestoJob(com.google.cloud.dataproc.v1.PrestoJob value) {
if (prestoJobBuilder_ == null) {
if (jobTypeCase_ == 12
&& jobType_ != com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.PrestoJob.newBuilder(
(com.google.cloud.dataproc.v1.PrestoJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 12) {
prestoJobBuilder_.mergeFrom(value);
} else {
prestoJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 12;
return this;
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearPrestoJob() {
if (prestoJobBuilder_ == null) {
if (jobTypeCase_ == 12) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 12) {
jobTypeCase_ = 0;
jobType_ = null;
}
prestoJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.PrestoJob.Builder getPrestoJobBuilder() {
return getPrestoJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.PrestoJobOrBuilder getPrestoJobOrBuilder() {
if ((jobTypeCase_ == 12) && (prestoJobBuilder_ != null)) {
return prestoJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 12) {
return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
}
return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Presto job.
*
*
*
* .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PrestoJob,
com.google.cloud.dataproc.v1.PrestoJob.Builder,
com.google.cloud.dataproc.v1.PrestoJobOrBuilder>
getPrestoJobFieldBuilder() {
if (prestoJobBuilder_ == null) {
if (!(jobTypeCase_ == 12)) {
jobType_ = com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
}
prestoJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.PrestoJob,
com.google.cloud.dataproc.v1.PrestoJob.Builder,
com.google.cloud.dataproc.v1.PrestoJobOrBuilder>(
(com.google.cloud.dataproc.v1.PrestoJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 12;
onChanged();
return prestoJobBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.TrinoJob,
com.google.cloud.dataproc.v1.TrinoJob.Builder,
com.google.cloud.dataproc.v1.TrinoJobOrBuilder>
trinoJobBuilder_;
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the trinoJob field is set.
*/
@java.lang.Override
public boolean hasTrinoJob() {
return jobTypeCase_ == 13;
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The trinoJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.TrinoJob getTrinoJob() {
if (trinoJobBuilder_ == null) {
if (jobTypeCase_ == 13) {
return (com.google.cloud.dataproc.v1.TrinoJob) jobType_;
}
return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 13) {
return trinoJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setTrinoJob(com.google.cloud.dataproc.v1.TrinoJob value) {
if (trinoJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
trinoJobBuilder_.setMessage(value);
}
jobTypeCase_ = 13;
return this;
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setTrinoJob(com.google.cloud.dataproc.v1.TrinoJob.Builder builderForValue) {
if (trinoJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
trinoJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 13;
return this;
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeTrinoJob(com.google.cloud.dataproc.v1.TrinoJob value) {
if (trinoJobBuilder_ == null) {
if (jobTypeCase_ == 13
&& jobType_ != com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.TrinoJob.newBuilder(
(com.google.cloud.dataproc.v1.TrinoJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 13) {
trinoJobBuilder_.mergeFrom(value);
} else {
trinoJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 13;
return this;
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearTrinoJob() {
if (trinoJobBuilder_ == null) {
if (jobTypeCase_ == 13) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 13) {
jobTypeCase_ = 0;
jobType_ = null;
}
trinoJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.TrinoJob.Builder getTrinoJobBuilder() {
return getTrinoJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.TrinoJobOrBuilder getTrinoJobOrBuilder() {
if ((jobTypeCase_ == 13) && (trinoJobBuilder_ != null)) {
return trinoJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 13) {
return (com.google.cloud.dataproc.v1.TrinoJob) jobType_;
}
return com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Trino job.
*
*
*
* .google.cloud.dataproc.v1.TrinoJob trino_job = 13 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.TrinoJob,
com.google.cloud.dataproc.v1.TrinoJob.Builder,
com.google.cloud.dataproc.v1.TrinoJobOrBuilder>
getTrinoJobFieldBuilder() {
if (trinoJobBuilder_ == null) {
if (!(jobTypeCase_ == 13)) {
jobType_ = com.google.cloud.dataproc.v1.TrinoJob.getDefaultInstance();
}
trinoJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.TrinoJob,
com.google.cloud.dataproc.v1.TrinoJob.Builder,
com.google.cloud.dataproc.v1.TrinoJobOrBuilder>(
(com.google.cloud.dataproc.v1.TrinoJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 13;
onChanged();
return trinoJobBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.FlinkJob,
com.google.cloud.dataproc.v1.FlinkJob.Builder,
com.google.cloud.dataproc.v1.FlinkJobOrBuilder>
flinkJobBuilder_;
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the flinkJob field is set.
*/
@java.lang.Override
public boolean hasFlinkJob() {
return jobTypeCase_ == 14;
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The flinkJob.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.FlinkJob getFlinkJob() {
if (flinkJobBuilder_ == null) {
if (jobTypeCase_ == 14) {
return (com.google.cloud.dataproc.v1.FlinkJob) jobType_;
}
return com.google.cloud.dataproc.v1.FlinkJob.getDefaultInstance();
} else {
if (jobTypeCase_ == 14) {
return flinkJobBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1.FlinkJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setFlinkJob(com.google.cloud.dataproc.v1.FlinkJob value) {
if (flinkJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
jobType_ = value;
onChanged();
} else {
flinkJobBuilder_.setMessage(value);
}
jobTypeCase_ = 14;
return this;
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setFlinkJob(com.google.cloud.dataproc.v1.FlinkJob.Builder builderForValue) {
if (flinkJobBuilder_ == null) {
jobType_ = builderForValue.build();
onChanged();
} else {
flinkJobBuilder_.setMessage(builderForValue.build());
}
jobTypeCase_ = 14;
return this;
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeFlinkJob(com.google.cloud.dataproc.v1.FlinkJob value) {
if (flinkJobBuilder_ == null) {
if (jobTypeCase_ == 14
&& jobType_ != com.google.cloud.dataproc.v1.FlinkJob.getDefaultInstance()) {
jobType_ =
com.google.cloud.dataproc.v1.FlinkJob.newBuilder(
(com.google.cloud.dataproc.v1.FlinkJob) jobType_)
.mergeFrom(value)
.buildPartial();
} else {
jobType_ = value;
}
onChanged();
} else {
if (jobTypeCase_ == 14) {
flinkJobBuilder_.mergeFrom(value);
} else {
flinkJobBuilder_.setMessage(value);
}
}
jobTypeCase_ = 14;
return this;
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearFlinkJob() {
if (flinkJobBuilder_ == null) {
if (jobTypeCase_ == 14) {
jobTypeCase_ = 0;
jobType_ = null;
onChanged();
}
} else {
if (jobTypeCase_ == 14) {
jobTypeCase_ = 0;
jobType_ = null;
}
flinkJobBuilder_.clear();
}
return this;
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.FlinkJob.Builder getFlinkJobBuilder() {
return getFlinkJobFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.FlinkJobOrBuilder getFlinkJobOrBuilder() {
if ((jobTypeCase_ == 14) && (flinkJobBuilder_ != null)) {
return flinkJobBuilder_.getMessageOrBuilder();
} else {
if (jobTypeCase_ == 14) {
return (com.google.cloud.dataproc.v1.FlinkJob) jobType_;
}
return com.google.cloud.dataproc.v1.FlinkJob.getDefaultInstance();
}
}
/**
*
*
*
* Optional. Job is a Flink job.
*
*
*
* .google.cloud.dataproc.v1.FlinkJob flink_job = 14 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.FlinkJob,
com.google.cloud.dataproc.v1.FlinkJob.Builder,
com.google.cloud.dataproc.v1.FlinkJobOrBuilder>
getFlinkJobFieldBuilder() {
if (flinkJobBuilder_ == null) {
if (!(jobTypeCase_ == 14)) {
jobType_ = com.google.cloud.dataproc.v1.FlinkJob.getDefaultInstance();
}
flinkJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.FlinkJob,
com.google.cloud.dataproc.v1.FlinkJob.Builder,
com.google.cloud.dataproc.v1.FlinkJobOrBuilder>(
(com.google.cloud.dataproc.v1.FlinkJob) jobType_,
getParentForChildren(),
isClean());
jobType_ = null;
}
jobTypeCase_ = 14;
onChanged();
return flinkJobBuilder_;
}
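// Usage sketch: dispatching on the selected job type via the generated
// JobTypeCase enum. getJobTypeCase() and HadoopJob.getMainJarFileUri() are
// standard generated accessors, assumed here rather than shown in this
// excerpt.
//
//   static java.lang.String describe(com.google.cloud.dataproc.v1.OrderedJob job) {
//     switch (job.getJobTypeCase()) {
//       case HADOOP_JOB:
//         return "hadoop jar " + job.getHadoopJob().getMainJarFileUri();
//       case JOBTYPE_NOT_SET:
//         return "no job configured";
//       default:
//         return job.getJobTypeCase().name().toLowerCase();
//     }
//   }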
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
if (labels_ == null) {
return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
}
return labels_;
}
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
internalGetMutableLabels() {
if (labels_ == null) {
labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
}
if (!labels_.isMutable()) {
labels_ = labels_.copy();
}
bitField0_ |= 0x00000800;
onChanged();
return labels_;
}
public int getLabelsCount() {
return internalGetLabels().getMap().size();
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
return getLabelsMap();
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
return internalGetLabels().getMap();
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
public Builder clearLabels() {
bitField0_ = (bitField0_ & ~0x00000800);
internalGetMutableLabels().getMutableMap().clear();
return this;
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
public Builder removeLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
internalGetMutableLabels().getMutableMap().remove(key);
return this;
}
/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
bitField0_ |= 0x00000800;
return internalGetMutableLabels().getMutableMap();
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
public Builder putLabels(java.lang.String key, java.lang.String value) {
if (key == null) {
throw new NullPointerException("map key");
}
if (value == null) {
throw new NullPointerException("map value");
}
internalGetMutableLabels().getMutableMap().put(key, value);
bitField0_ |= 0x00000800;
return this;
}
/**
*
*
*
* Optional. The labels to associate with this job.
*
* Label keys must be between 1 and 63 characters long, and must conform to
* the following regular expression:
* [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
*
* Label values must be between 1 and 63 characters long, and must conform to
* the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
*
* No more than 32 labels can be associated with a given job.
*
*
* map<string, string> labels = 8 [(.google.api.field_behavior) = OPTIONAL];
*/
public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
internalGetMutableLabels().getMutableMap().putAll(values);
bitField0_ |= 0x00000800;
return this;
}
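// Usage sketch: putLabels only rejects null keys and values; the key/value
// patterns and the 32-label limit documented above are not validated by this
// builder. The label values here are hypothetical.
//
//   OrderedJob.Builder step =
//       OrderedJob.newBuilder()
//           .putLabels("env", "prod")
//           .putLabels("team", "data-platform");
//   java.util.Map<java.lang.String, java.lang.String> labels = step.getLabelsMap();
//   assert labels.containsKey("env");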
private com.google.cloud.dataproc.v1.JobScheduling scheduling_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.JobScheduling,
com.google.cloud.dataproc.v1.JobScheduling.Builder,
com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>
schedulingBuilder_;
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return Whether the scheduling field is set.
*/
public boolean hasScheduling() {
return ((bitField0_ & 0x00001000) != 0);
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The scheduling.
*/
public com.google.cloud.dataproc.v1.JobScheduling getScheduling() {
if (schedulingBuilder_ == null) {
return scheduling_ == null
? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
: scheduling_;
} else {
return schedulingBuilder_.getMessage();
}
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setScheduling(com.google.cloud.dataproc.v1.JobScheduling value) {
if (schedulingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scheduling_ = value;
} else {
schedulingBuilder_.setMessage(value);
}
bitField0_ |= 0x00001000;
onChanged();
return this;
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder setScheduling(
com.google.cloud.dataproc.v1.JobScheduling.Builder builderForValue) {
if (schedulingBuilder_ == null) {
scheduling_ = builderForValue.build();
} else {
schedulingBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00001000;
onChanged();
return this;
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder mergeScheduling(com.google.cloud.dataproc.v1.JobScheduling value) {
if (schedulingBuilder_ == null) {
if (((bitField0_ & 0x00001000) != 0)
&& scheduling_ != null
&& scheduling_ != com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()) {
getSchedulingBuilder().mergeFrom(value);
} else {
scheduling_ = value;
}
} else {
schedulingBuilder_.mergeFrom(value);
}
if (scheduling_ != null) {
bitField0_ |= 0x00001000;
onChanged();
}
return this;
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public Builder clearScheduling() {
bitField0_ = (bitField0_ & ~0x00001000);
scheduling_ = null;
if (schedulingBuilder_ != null) {
schedulingBuilder_.dispose();
schedulingBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.JobScheduling.Builder getSchedulingBuilder() {
bitField0_ |= 0x00001000;
onChanged();
return getSchedulingFieldBuilder().getBuilder();
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() {
if (schedulingBuilder_ != null) {
return schedulingBuilder_.getMessageOrBuilder();
} else {
return scheduling_ == null
? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
: scheduling_;
}
}
/**
*
*
*
* Optional. Job scheduling configuration.
*
*
*
* .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
*
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.JobScheduling,
com.google.cloud.dataproc.v1.JobScheduling.Builder,
com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>
getSchedulingFieldBuilder() {
if (schedulingBuilder_ == null) {
schedulingBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1.JobScheduling,
com.google.cloud.dataproc.v1.JobScheduling.Builder,
com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>(
getScheduling(), getParentForChildren(), isClean());
scheduling_ = null;
}
return schedulingBuilder_;
}
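// Usage sketch: attaching retry configuration to a step.
// JobScheduling.setMaxFailuresPerHour is assumed from the dataproc v1
// JobScheduling message.
//
//   OrderedJob.Builder step =
//       OrderedJob.newBuilder()
//           .setScheduling(
//               com.google.cloud.dataproc.v1.JobScheduling.newBuilder()
//                   .setMaxFailuresPerHour(3)
//                   .build());
//   assert step.hasScheduling();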
private com.google.protobuf.LazyStringArrayList prerequisiteStepIds_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensurePrerequisiteStepIdsIsMutable() {
if (!prerequisiteStepIds_.isModifiable()) {
prerequisiteStepIds_ = new com.google.protobuf.LazyStringArrayList(prerequisiteStepIds_);
}
bitField0_ |= 0x00002000;
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return A list containing the prerequisiteStepIds.
*/
public com.google.protobuf.ProtocolStringList getPrerequisiteStepIdsList() {
prerequisiteStepIds_.makeImmutable();
return prerequisiteStepIds_;
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return The count of prerequisiteStepIds.
*/
public int getPrerequisiteStepIdsCount() {
return prerequisiteStepIds_.size();
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param index The index of the element to return.
* @return The prerequisiteStepIds at the given index.
*/
public java.lang.String getPrerequisiteStepIds(int index) {
return prerequisiteStepIds_.get(index);
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param index The index of the value to return.
* @return The bytes of the prerequisiteStepIds at the given index.
*/
public com.google.protobuf.ByteString getPrerequisiteStepIdsBytes(int index) {
return prerequisiteStepIds_.getByteString(index);
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param index The index to set the value at.
* @param value The prerequisiteStepIds to set.
* @return This builder for chaining.
*/
public Builder setPrerequisiteStepIds(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePrerequisiteStepIdsIsMutable();
prerequisiteStepIds_.set(index, value);
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param value The prerequisiteStepIds to add.
* @return This builder for chaining.
*/
public Builder addPrerequisiteStepIds(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensurePrerequisiteStepIdsIsMutable();
prerequisiteStepIds_.add(value);
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param values The prerequisiteStepIds to add.
* @return This builder for chaining.
*/
public Builder addAllPrerequisiteStepIds(java.lang.Iterable<java.lang.String> values) {
ensurePrerequisiteStepIdsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, prerequisiteStepIds_);
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @return This builder for chaining.
*/
public Builder clearPrerequisiteStepIds() {
prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00002000);
onChanged();
return this;
}
/**
*
*
*
* Optional. The optional list of prerequisite job step_ids.
* If not specified, the job will start at the beginning of workflow.
*
*
* repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
*
*
* @param value The bytes of the prerequisiteStepIds to add.
* @return This builder for chaining.
*/
public Builder addPrerequisiteStepIdsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensurePrerequisiteStepIdsIsMutable();
prerequisiteStepIds_.add(value);
bitField0_ |= 0x00002000;
onChanged();
return this;
}
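// Usage sketch: prerequisite_step_ids wires steps into a DAG within a
// workflow template. Here "clean" runs only after the step whose step_id is
// "ingest" completes; the step ids are hypothetical.
//
//   OrderedJob ingest = OrderedJob.newBuilder().setStepId("ingest").build();
//   OrderedJob clean =
//       OrderedJob.newBuilder()
//           .setStepId("clean")
//           .addPrerequisiteStepIds("ingest")
//           .build();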
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.OrderedJob)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.OrderedJob)
private static final com.google.cloud.dataproc.v1.OrderedJob DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.OrderedJob();
}
public static com.google.cloud.dataproc.v1.OrderedJob getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<OrderedJob> PARSER =
new com.google.protobuf.AbstractParser<OrderedJob>() {
@java.lang.Override
public OrderedJob parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<OrderedJob> parser() {
return PARSER;
}
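// Usage sketch: round-tripping a message through the wire format with the
// parser defined above (parseFrom throws InvalidProtocolBufferException on
// malformed input).
//
//   byte[] bytes =
//       OrderedJob.newBuilder().setStepId("ingest").build().toByteArray();
//   OrderedJob parsed = OrderedJob.parser().parseFrom(bytes);
//   assert parsed.getStepId().equals("ingest");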
@java.lang.Override
public com.google.protobuf.Parser<OrderedJob> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dataproc.v1.OrderedJob getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}