/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dataproc.model;
/**
* Write Spark Application data to internal storage systems
*
* This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Dataproc API. For a detailed explanation see:
* https://developers.google.com/api-client-library/java/google-http-java-client/json
*
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class WriteSessionSparkApplicationContextRequest extends com.google.api.client.json.GenericJson {

  /**
   * Required. Parent (Batch) resource reference.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String parent;

  /**
   * Required. The batch of Spark application context objects sent for ingestion.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<SparkWrapperObject> sparkWrapperObjects;

  static {
    // hack to force ProGuard to consider SparkWrapperObject used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(SparkWrapperObject.class);
  }
  /**
   * Required. Parent (Batch) resource reference.
   * @return value or {@code null} for none
   */
  public java.lang.String getParent() {
    return parent;
  }

  /**
   * Required. Parent (Batch) resource reference.
   * @param parent parent or {@code null} for none
   */
  public WriteSessionSparkApplicationContextRequest setParent(java.lang.String parent) {
    this.parent = parent;
    return this;
  }
  /**
   * Required. The batch of Spark application context objects sent for ingestion.
   * @return value or {@code null} for none
   */
  public java.util.List<SparkWrapperObject> getSparkWrapperObjects() {
    return sparkWrapperObjects;
  }

  /**
   * Required. The batch of Spark application context objects sent for ingestion.
   * @param sparkWrapperObjects sparkWrapperObjects or {@code null} for none
   */
  public WriteSessionSparkApplicationContextRequest setSparkWrapperObjects(java.util.List<SparkWrapperObject> sparkWrapperObjects) {
    this.sparkWrapperObjects = sparkWrapperObjects;
    return this;
  }
  @Override
  public WriteSessionSparkApplicationContextRequest set(String fieldName, Object value) {
    return (WriteSessionSparkApplicationContextRequest) super.set(fieldName, value);
  }

  @Override
  public WriteSessionSparkApplicationContextRequest clone() {
    return (WriteSessionSparkApplicationContextRequest) super.clone();
  }
}
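
/*
 * Minimal usage sketch (not part of the generated model): shows how this request
 * might be assembled with the chained setters above before being handed to the
 * Dataproc client. The parent resource name and the empty SparkWrapperObject
 * below are hypothetical placeholders, not values taken from the API definition.
 */
class WriteSessionSparkApplicationContextRequestUsageSketch {

  static WriteSessionSparkApplicationContextRequest buildRequest() {
    // Hypothetical parent resource reference; substitute the value expected by
    // your Dataproc deployment.
    java.lang.String parent = "projects/my-project/locations/us-central1/batches/my-batch";

    // A single empty wrapper object, purely for illustration; a real call would
    // populate each SparkWrapperObject with the Spark application context to ingest.
    java.util.List<SparkWrapperObject> objects =
        java.util.Collections.singletonList(new SparkWrapperObject());

    // Setters return the request itself, so the calls can be chained fluently.
    return new WriteSessionSparkApplicationContextRequest()
        .setParent(parent)
        .setSparkWrapperObjects(objects);
  }
}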