// org.apache.hadoop.yarn.proto.CsiAdaptorProtos (artifact-browser header; not Java source)
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_csi_adaptor.proto
package org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto;
public final class CsiAdaptorProtos {
// Private constructor: this outer class only namespaces the generated message
// types and is never instantiated.
private CsiAdaptorProtos() {}
// yarn_csi_adaptor.proto declares no extensions, so registration is a no-op.
public static void registerAllExtensions(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
// Full-registry overload; delegates to the Lite variant above.
public static void registerAllExtensions(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
public interface ValidateVolumeCapabilitiesRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string volume_id = 1;
*/
boolean hasVolumeId();
/**
* required string volume_id = 1;
*/
java.lang.String getVolumeId();
/**
* required string volume_id = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes();
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
java.util.List
getVolumeCapabilitiesList();
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapabilities(int index);
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
int getVolumeCapabilitiesCount();
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>
getVolumeCapabilitiesOrBuilderList();
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilitiesOrBuilder(
int index);
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
java.util.List
getVolumeAttributesList();
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeAttributes(int index);
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
int getVolumeAttributesCount();
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getVolumeAttributesOrBuilderList();
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeAttributesOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesRequest}
*/
public static final class ValidateVolumeCapabilitiesRequest extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
ValidateVolumeCapabilitiesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ValidateVolumeCapabilitiesRequest.newBuilder() to construct.
// Fix vs. original: the scrape dropped the wildcard type argument
// ("Builder> builder"); restored to Builder<?> as protoc emits.
private ValidateVolumeCapabilitiesRequest(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance state: empty id, immutable empty repeated fields.
private ValidateVolumeCapabilitiesRequest() {
volumeId_ = "";
volumeCapabilities_ = java.util.Collections.emptyList();
volumeAttributes_ = java.util.Collections.emptyList();
}
// Fields not defined in the schema are preserved here for round-tripping.
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
 * dispatching on the tag (10 = field 1 string, 18 = field 2 message,
 * 26 = field 3 message); unrecognized fields go to unknownFields.
 * Fixes vs. original: the catch clause used the scrape-mangled type
 * "java.org.apache.hadoop.shaded.io.IOException" (restored to
 * java.io.IOException) and the ArrayList allocations lost their element
 * type parameters (restored).
 */
private ValidateVolumeCapabilitiesRequest(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
volumeId_ = bs;
break;
}
case 18: {
// Lazily switch the repeated field from the shared empty list to a
// mutable one on first element.
if (!((mutable_bitField0_ & 0x00000002) != 0)) {
volumeCapabilities_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability>();
mutable_bitField0_ |= 0x00000002;
}
volumeCapabilities_.add(
input.readMessage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.PARSER, extensionRegistry));
break;
}
case 26: {
if (!((mutable_bitField0_ & 0x00000004) != 0)) {
volumeAttributes_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>();
mutable_bitField0_ |= 0x00000004;
}
volumeAttributes_.add(
input.readMessage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER, extensionRegistry));
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Seal repeated fields and the unknown-field set even on parse failure,
// so the partially-built message attached to the exception is safe.
if (((mutable_bitField0_ & 0x00000002) != 0)) {
volumeCapabilities_ = java.util.Collections.unmodifiableList(volumeCapabilities_);
}
if (((mutable_bitField0_ & 0x00000004) != 0)) {
volumeAttributes_ = java.util.Collections.unmodifiableList(volumeAttributes_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Message descriptor, initialized in the outer class's static descriptor setup.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.Builder.class);
}
// Bit 0 records presence of the required volume_id field.
private int bitField0_;
public static final int VOLUME_ID_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; converted lazily (see getVolumeId).
private volatile java.lang.Object volumeId_;
/**
 * required string volume_id = 1;
 */
public boolean hasVolumeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required string volume_id = 1;
 * Decodes and caches the String form; the cache is only written when the
 * bytes are valid UTF-8, so invalid bytes are re-decoded on each call.
 */
public java.lang.String getVolumeId() {
java.lang.Object ref = volumeId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
volumeId_ = s;
}
return s;
}
}
/**
 * required string volume_id = 1;
 * Returns the UTF-8 bytes, caching the ByteString form when the field
 * currently holds a String.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes() {
java.lang.Object ref = volumeId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
volumeId_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int VOLUME_CAPABILITIES_FIELD_NUMBER = 2;
private java.util.List volumeCapabilities_;
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public java.util.List getVolumeCapabilitiesList() {
return volumeCapabilities_;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>
getVolumeCapabilitiesOrBuilderList() {
return volumeCapabilities_;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public int getVolumeCapabilitiesCount() {
return volumeCapabilities_.size();
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapabilities(int index) {
return volumeCapabilities_.get(index);
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilitiesOrBuilder(
int index) {
return volumeCapabilities_.get(index);
}
public static final int VOLUME_ATTRIBUTES_FIELD_NUMBER = 3;
private java.util.List volumeAttributes_;
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public java.util.List getVolumeAttributesList() {
return volumeAttributes_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getVolumeAttributesOrBuilderList() {
return volumeAttributes_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public int getVolumeAttributesCount() {
return volumeAttributes_.size();
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeAttributes(int index) {
return volumeAttributes_.get(index);
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeAttributesOrBuilder(
int index) {
return volumeAttributes_.get(index);
}
// Memoized init check: -1 = not computed, 0 = missing required data, 1 = ok.
private byte memoizedIsInitialized = -1;
// A message is initialized when required volume_id is set and every repeated
// capability element is itself initialized (attributes have no required fields
// checked here).
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasVolumeId()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getVolumeCapabilitiesCount(); i++) {
if (!getVolumeCapabilities(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes the message in field-number order: volume_id (1), each
 * capability (2), each attribute (3), then any preserved unknown fields.
 * Fix vs. original: throws clause used the scrape-mangled type
 * "java.org.apache.hadoop.shaded.io.IOException"; restored to
 * java.io.IOException.
 */
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, volumeId_);
}
for (int i = 0; i < volumeCapabilities_.size(); i++) {
output.writeMessage(2, volumeCapabilities_.get(i));
}
for (int i = 0; i < volumeAttributes_.size(); i++) {
output.writeMessage(3, volumeAttributes_.get(i));
}
unknownFields.writeTo(output);
}
/**
 * Computes (and memoizes) the serialized byte size; must mirror writeTo.
 * Fix vs. original: the shading/scrape step mangled the size helpers into
 * ".org.apache.hadoop.shaded.com.uteStringSize/uteMessageSize"; restored to
 * computeStringSize / CodedOutputStream.computeMessageSize.
 */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, volumeId_);
}
for (int i = 0; i < volumeCapabilities_.size(); i++) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, volumeCapabilities_.get(i));
}
for (int i = 0; i < volumeAttributes_.size(); i++) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(3, volumeAttributes_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all three fields plus unknown fields; presence of the
// optional-style required field is compared before its value.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest) obj;
if (hasVolumeId() != other.hasVolumeId()) return false;
if (hasVolumeId()) {
if (!getVolumeId()
.equals(other.getVolumeId())) return false;
}
if (!getVolumeCapabilitiesList()
.equals(other.getVolumeCapabilitiesList())) return false;
if (!getVolumeAttributesList()
.equals(other.getVolumeAttributesList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash, folding in field numbers so equal values in different
// fields hash differently; consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasVolumeId()) {
hash = (37 * hash) + VOLUME_ID_FIELD_NUMBER;
hash = (53 * hash) + getVolumeId().hashCode();
}
if (getVolumeCapabilitiesCount() > 0) {
hash = (37 * hash) + VOLUME_CAPABILITIES_FIELD_NUMBER;
hash = (53 * hash) + getVolumeCapabilitiesList().hashCode();
}
if (getVolumeAttributesCount() > 0) {
hash = (37 * hash) + VOLUME_ATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getVolumeAttributesList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points over in-memory sources
// (ByteBuffer, ByteString, byte[]); all delegate to PARSER.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream-based parse entry points. Fix vs. original: the scrape mangled
// java.io.InputStream / java.io.IOException into
// "java.org.apache.hadoop.shaded.io.*"; restored to the real java.io types.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods; builders are created from the default instance
// (or seeded from a prototype / this message).
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Avoids a needless mergeFrom when this is already the default instance.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesRequest}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequestOrBuilder {
// Same descriptor/accessor-table wiring as the enclosing message type.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the repeated-field builders when the runtime always uses
// field builders (nested-builder mode).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getVolumeCapabilitiesFieldBuilder();
getVolumeAttributesFieldBuilder();
}
}
// Resets every field to its default; repeated fields either drop back to the
// shared empty list or clear their nested builder, whichever is active.
@java.lang.Override
public Builder clear() {
super.clear();
volumeId_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (volumeCapabilitiesBuilder_ == null) {
volumeCapabilities_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
volumeCapabilitiesBuilder_.clear();
}
if (volumeAttributesBuilder_ == null) {
volumeAttributes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
volumeAttributesBuilder_.clear();
}
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.getDefaultInstance();
}
// build() enforces required-field initialization; buildPartial() does not.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without the required-field check.
// Repeated fields are sealed (made unmodifiable) and then SHARED with the
// message; the builder's has-bit for them is cleared so any later mutation
// re-copies into a fresh list instead of touching the built message.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.volumeId_ = volumeId_;
if (volumeCapabilitiesBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
volumeCapabilities_ = java.util.Collections.unmodifiableList(volumeCapabilities_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.volumeCapabilities_ = volumeCapabilities_;
} else {
result.volumeCapabilities_ = volumeCapabilitiesBuilder_.build();
}
if (volumeAttributesBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)) {
volumeAttributes_ = java.util.Collections.unmodifiableList(volumeAttributes_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.volumeAttributes_ = volumeAttributes_;
} else {
result.volumeAttributes_ = volumeAttributesBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Reflection-based mutators: straight delegations to the superclass,
// narrowed here so each returns this concrete Builder type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic merge: takes the typed fast path when possible, otherwise falls
// back to the reflective field-by-field merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: scalar fields are overwritten when set in `other`; repeated
// fields are concatenated. When our list is still empty we adopt the other
// message's (immutable) list directly and clear the has-bit so a later
// mutation copies it first. With an active nested builder the same logic
// runs through the RepeatedFieldBuilderV3 API instead.
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.getDefaultInstance()) return this;
if (other.hasVolumeId()) {
bitField0_ |= 0x00000001;
volumeId_ = other.volumeId_;
onChanged();
}
if (volumeCapabilitiesBuilder_ == null) {
if (!other.volumeCapabilities_.isEmpty()) {
if (volumeCapabilities_.isEmpty()) {
volumeCapabilities_ = other.volumeCapabilities_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.addAll(other.volumeCapabilities_);
}
onChanged();
}
} else {
if (!other.volumeCapabilities_.isEmpty()) {
if (volumeCapabilitiesBuilder_.isEmpty()) {
volumeCapabilitiesBuilder_.dispose();
volumeCapabilitiesBuilder_ = null;
volumeCapabilities_ = other.volumeCapabilities_;
bitField0_ = (bitField0_ & ~0x00000002);
volumeCapabilitiesBuilder_ =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getVolumeCapabilitiesFieldBuilder() : null;
} else {
volumeCapabilitiesBuilder_.addAllMessages(other.volumeCapabilities_);
}
}
}
if (volumeAttributesBuilder_ == null) {
if (!other.volumeAttributes_.isEmpty()) {
if (volumeAttributes_.isEmpty()) {
volumeAttributes_ = other.volumeAttributes_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureVolumeAttributesIsMutable();
volumeAttributes_.addAll(other.volumeAttributes_);
}
onChanged();
}
} else {
if (!other.volumeAttributes_.isEmpty()) {
if (volumeAttributesBuilder_.isEmpty()) {
volumeAttributesBuilder_.dispose();
volumeAttributesBuilder_ = null;
volumeAttributes_ = other.volumeAttributes_;
bitField0_ = (bitField0_ & ~0x00000004);
volumeAttributesBuilder_ =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getVolumeAttributesFieldBuilder() : null;
} else {
volumeAttributesBuilder_.addAllMessages(other.volumeAttributes_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Unmemoized mirror of the message's isInitialized(): required volume_id set
// and every capability element initialized.
@java.lang.Override
public final boolean isInitialized() {
if (!hasVolumeId()) {
return false;
}
for (int i = 0; i < getVolumeCapabilitiesCount(); i++) {
if (!getVolumeCapabilities(i).isInitialized()) {
return false;
}
}
return true;
}
/**
 * Parses from a CodedInputStream and merges the result into this builder;
 * on parse failure the partially-parsed message is still merged (finally
 * block) before the exception propagates.
 * Fix vs. original: throws clause used the scrape-mangled type
 * "java.org.apache.hadoop.shaded.io.IOException"; restored to
 * java.io.IOException.
 */
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Builder-side has-bits: bit 0 = volume_id, bit 1 = capabilities list is
// mutable, bit 2 = attributes list is mutable.
private int bitField0_;
// Same String/ByteString dual representation as on the message.
private java.lang.Object volumeId_ = "";
/**
 * required string volume_id = 1;
 */
public boolean hasVolumeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required string volume_id = 1;
 * Decodes and (for valid UTF-8) caches the String form.
 */
public java.lang.String getVolumeId() {
java.lang.Object ref = volumeId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
volumeId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * required string volume_id = 1;
 * Returns the UTF-8 bytes, caching the ByteString form.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes() {
java.lang.Object ref = volumeId_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
volumeId_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * required string volume_id = 1;
 * Sets the field; rejects null rather than clearing.
 */
public Builder setVolumeId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
volumeId_ = value;
onChanged();
return this;
}
/**
 * required string volume_id = 1;
 * Clears the presence bit and restores the default ("").
 */
public Builder clearVolumeId() {
bitField0_ = (bitField0_ & ~0x00000001);
volumeId_ = getDefaultInstance().getVolumeId();
onChanged();
return this;
}
/**
 * required string volume_id = 1;
 * Raw-bytes setter; bytes are not validated as UTF-8 here.
 */
public Builder setVolumeIdBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
volumeId_ = value;
onChanged();
return this;
}
// Backing list for volume_capabilities while no nested field builder is in
// use; starts as the shared immutable empty list and is copy-on-write via
// ensureVolumeCapabilitiesIsMutable(). Fix vs. original: the scrape stripped
// the List/ArrayList type parameters; restored.
private java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> volumeCapabilities_ =
java.util.Collections.emptyList();
// Copies the current list into a private mutable ArrayList the first time a
// mutation is attempted (bit 1 of bitField0_ tracks mutability).
private void ensureVolumeCapabilitiesIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
volumeCapabilities_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability>(volumeCapabilities_);
bitField0_ |= 0x00000002;
}
}
// Nested builder support; null until getVolumeCapabilitiesFieldBuilder() is
// first invoked, after which it owns the field's state.
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> volumeCapabilitiesBuilder_;
/**
 * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
 */
public java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> getVolumeCapabilitiesList() {
if (volumeCapabilitiesBuilder_ == null) {
return java.util.Collections.unmodifiableList(volumeCapabilities_);
} else {
return volumeCapabilitiesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public int getVolumeCapabilitiesCount() {
if (volumeCapabilitiesBuilder_ == null) {
return volumeCapabilities_.size();
} else {
return volumeCapabilitiesBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapabilities(int index) {
if (volumeCapabilitiesBuilder_ == null) {
return volumeCapabilities_.get(index);
} else {
return volumeCapabilitiesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder setVolumeCapabilities(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
if (volumeCapabilitiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.set(index, value);
onChanged();
} else {
volumeCapabilitiesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder setVolumeCapabilities(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
if (volumeCapabilitiesBuilder_ == null) {
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.set(index, builderForValue.build());
onChanged();
} else {
volumeCapabilitiesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder addVolumeCapabilities(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
if (volumeCapabilitiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.add(value);
onChanged();
} else {
volumeCapabilitiesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder addVolumeCapabilities(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
if (volumeCapabilitiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.add(index, value);
onChanged();
} else {
volumeCapabilitiesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder addVolumeCapabilities(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
if (volumeCapabilitiesBuilder_ == null) {
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.add(builderForValue.build());
onChanged();
} else {
volumeCapabilitiesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder addVolumeCapabilities(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
if (volumeCapabilitiesBuilder_ == null) {
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.add(index, builderForValue.build());
onChanged();
} else {
volumeCapabilitiesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder addAllVolumeCapabilities(
java.lang.Iterable extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> values) {
if (volumeCapabilitiesBuilder_ == null) {
ensureVolumeCapabilitiesIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, volumeCapabilities_);
onChanged();
} else {
volumeCapabilitiesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder clearVolumeCapabilities() {
if (volumeCapabilitiesBuilder_ == null) {
volumeCapabilities_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
volumeCapabilitiesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public Builder removeVolumeCapabilities(int index) {
if (volumeCapabilitiesBuilder_ == null) {
ensureVolumeCapabilitiesIsMutable();
volumeCapabilities_.remove(index);
onChanged();
} else {
volumeCapabilitiesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder getVolumeCapabilitiesBuilder(
int index) {
return getVolumeCapabilitiesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilitiesOrBuilder(
int index) {
if (volumeCapabilitiesBuilder_ == null) {
return volumeCapabilities_.get(index); } else {
return volumeCapabilitiesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>
getVolumeCapabilitiesOrBuilderList() {
if (volumeCapabilitiesBuilder_ != null) {
return volumeCapabilitiesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(volumeCapabilities_);
}
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder addVolumeCapabilitiesBuilder() {
return getVolumeCapabilitiesFieldBuilder().addBuilder(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder addVolumeCapabilitiesBuilder(
int index) {
return getVolumeCapabilitiesFieldBuilder().addBuilder(
index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;
*/
public java.util.List
getVolumeCapabilitiesBuilderList() {
return getVolumeCapabilitiesFieldBuilder().getBuilderList();
}
      // Lazily creates the RepeatedFieldBuilderV3 for volume_capabilities. Once
      // created, the plain list is handed off to the builder and nulled out, so
      // all subsequent reads/writes must go through volumeCapabilitiesBuilder_.
      private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>
          getVolumeCapabilitiesFieldBuilder() {
        if (volumeCapabilitiesBuilder_ == null) {
          volumeCapabilitiesBuilder_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>(
                  volumeCapabilities_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          volumeCapabilities_ = null;
        }
        return volumeCapabilitiesBuilder_;
      }
private java.util.List volumeAttributes_ =
java.util.Collections.emptyList();
private void ensureVolumeAttributesIsMutable() {
if (!((bitField0_ & 0x00000004) != 0)) {
volumeAttributes_ = new java.util.ArrayList(volumeAttributes_);
bitField0_ |= 0x00000004;
}
}
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> volumeAttributesBuilder_;
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public java.util.List getVolumeAttributesList() {
if (volumeAttributesBuilder_ == null) {
return java.util.Collections.unmodifiableList(volumeAttributes_);
} else {
return volumeAttributesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public int getVolumeAttributesCount() {
if (volumeAttributesBuilder_ == null) {
return volumeAttributes_.size();
} else {
return volumeAttributesBuilder_.getCount();
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeAttributes(int index) {
if (volumeAttributesBuilder_ == null) {
return volumeAttributes_.get(index);
} else {
return volumeAttributesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder setVolumeAttributes(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (volumeAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeAttributesIsMutable();
volumeAttributes_.set(index, value);
onChanged();
} else {
volumeAttributesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder setVolumeAttributes(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (volumeAttributesBuilder_ == null) {
ensureVolumeAttributesIsMutable();
volumeAttributes_.set(index, builderForValue.build());
onChanged();
} else {
volumeAttributesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder addVolumeAttributes(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (volumeAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeAttributesIsMutable();
volumeAttributes_.add(value);
onChanged();
} else {
volumeAttributesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder addVolumeAttributes(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (volumeAttributesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeAttributesIsMutable();
volumeAttributes_.add(index, value);
onChanged();
} else {
volumeAttributesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder addVolumeAttributes(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (volumeAttributesBuilder_ == null) {
ensureVolumeAttributesIsMutable();
volumeAttributes_.add(builderForValue.build());
onChanged();
} else {
volumeAttributesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder addVolumeAttributes(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (volumeAttributesBuilder_ == null) {
ensureVolumeAttributesIsMutable();
volumeAttributes_.add(index, builderForValue.build());
onChanged();
} else {
volumeAttributesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder addAllVolumeAttributes(
java.lang.Iterable extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
if (volumeAttributesBuilder_ == null) {
ensureVolumeAttributesIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, volumeAttributes_);
onChanged();
} else {
volumeAttributesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder clearVolumeAttributes() {
if (volumeAttributesBuilder_ == null) {
volumeAttributes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
volumeAttributesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public Builder removeVolumeAttributes(int index) {
if (volumeAttributesBuilder_ == null) {
ensureVolumeAttributesIsMutable();
volumeAttributes_.remove(index);
onChanged();
} else {
volumeAttributesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getVolumeAttributesBuilder(
int index) {
return getVolumeAttributesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeAttributesOrBuilder(
int index) {
if (volumeAttributesBuilder_ == null) {
return volumeAttributes_.get(index); } else {
return volumeAttributesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getVolumeAttributesOrBuilderList() {
if (volumeAttributesBuilder_ != null) {
return volumeAttributesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(volumeAttributes_);
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeAttributesBuilder() {
return getVolumeAttributesFieldBuilder().addBuilder(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeAttributesBuilder(
int index) {
return getVolumeAttributesFieldBuilder().addBuilder(
index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;
*/
public java.util.List
getVolumeAttributesBuilderList() {
return getVolumeAttributesFieldBuilder().getBuilderList();
}
      // Lazily creates the RepeatedFieldBuilderV3 for volume_attributes. After
      // creation the plain list is handed off and nulled out; all later access
      // must go through volumeAttributesBuilder_.
      private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
          getVolumeAttributesFieldBuilder() {
        if (volumeAttributesBuilder_ == null) {
          volumeAttributesBuilder_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  volumeAttributes_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          volumeAttributes_ = null;
        }
        return volumeAttributesBuilder_;
      }
      // Delegates unknown-field handling to GeneratedMessageV3.Builder; overridden
      // only to return the concrete Builder type for fluent chaining.
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      // Merges (rather than replaces) the given unknown fields into this builder.
      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public ValidateVolumeCapabilitiesRequest parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new ValidateVolumeCapabilitiesRequest(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
  public interface ValidateVolumeCapabilitiesResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     */
    boolean hasSupported();
    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     */
    boolean getSupported();
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     */
    boolean hasMessage();
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     */
    java.lang.String getMessage();
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     */
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
        getMessageBytes();
  }
/**
* Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesResponse}
*/
public static final class ValidateVolumeCapabilitiesResponse extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
ValidateVolumeCapabilitiesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ValidateVolumeCapabilitiesResponse.newBuilder() to construct.
private ValidateVolumeCapabilitiesResponse(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private ValidateVolumeCapabilitiesResponse() {
message_ = "";
}
    // Exposes fields that were present on the wire but unknown to this schema version.
    @java.lang.Override
    public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
      return this.unknownFields;
    }
private ValidateVolumeCapabilitiesResponse(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
bitField0_ |= 0x00000001;
supported_ = input.readBool();
break;
}
case 18: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
message_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.org.apache.hadoop.shaded.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
    // Reflection support: descriptor and field-accessor table generated from
    // yarn_csi_adaptor.proto for this message type.
    public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
    }
    @java.lang.Override
    protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.Builder.class);
    }
    // Presence bits: 0x1 = supported, 0x2 = message.
    private int bitField0_;
    public static final int SUPPORTED_FIELD_NUMBER = 1;
    private boolean supported_;
    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     */
    public boolean hasSupported() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     */
    public boolean getSupported() {
      return supported_;
    }
public static final int MESSAGE_FIELD_NUMBER = 2;
private volatile java.lang.Object message_;
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
    // Memoized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      // The only proto2 required field in this message is `supported`.
      if (!hasSupported()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeBool(1, supported_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, message_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.org.apache.hadoop.shaded.com.uteBoolSize(1, supported_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.org.apache.hadoop.shaded.com.uteStringSize(2, message_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
    // Value equality: same presence bits, same field values, same unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse) obj;
      if (hasSupported() != other.hasSupported()) return false;
      if (hasSupported()) {
        if (getSupported()
            != other.getSupported()) return false;
      }
      if (hasMessage() != other.hasMessage()) return false;
      if (hasMessage()) {
        if (!getMessage()
            .equals(other.getMessage())) return false;
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }
    // Hash is memoized (0 = not yet computed) and folds in only fields that are set,
    // mirroring equals() so the equals/hashCode contract holds.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasSupported()) {
        hash = (37 * hash) + SUPPORTED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getSupported());
      }
      if (hasMessage()) {
        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
        hash = (53 * hash) + getMessage().hashCode();
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// NOTE(review): the shade/relocation pass that produced this file corrupted
// "java.io.*" into "java.org.apache.hadoop.shaded.io.*"; the JDK type names
// below are restored so these generated overloads compile as protoc emitted them.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Creates a fresh Builder seeded from the singleton default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a Builder pre-populated with all fields of {@code prototype}.
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom when this message is the (empty) default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesResponse}
*
* Builder for the response message: a required {@code supported} flag plus an
* optional human-readable {@code message}. Generated code; only the corrupted
* "java.io.IOException" token in mergeFrom (see NOTE below) was restored.
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponseOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No-op here: this message has no sub-message fields needing eager builders.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
supported_ = false;
bitField0_ = (bitField0_ & ~0x00000001);
message_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse result = buildPartial();
// build() (unlike buildPartial()) rejects messages missing required fields.
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.supported_ = supported_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.message_ = message_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: set fields of {@code other} overwrite ours.
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance()) return this;
if (other.hasSupported()) {
setSupported(other.getSupported());
}
if (other.hasMessage()) {
bitField0_ |= 0x00000002;
message_ = other.message_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// 'supported' is the only required field of this message.
if (!hasSupported()) {
return false;
}
return true;
}
// NOTE(review): throws clause restored to java.io.IOException; the shading
// pass had corrupted it to "java.org.apache.hadoop.shaded.io.IOException".
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
// Keep whatever was parsed before a failure so partial data is not lost.
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private boolean supported_ ;
/**
*
* True if the Plugin supports the specified capabilities for the
* given volume. This field is REQUIRED.
*
*
* required bool supported = 1;
*/
public boolean hasSupported() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
* True if the Plugin supports the specified capabilities for the
* given volume. This field is REQUIRED.
*
*
* required bool supported = 1;
*/
public boolean getSupported() {
return supported_;
}
/**
*
* True if the Plugin supports the specified capabilities for the
* given volume. This field is REQUIRED.
*
*
* required bool supported = 1;
*/
public Builder setSupported(boolean value) {
bitField0_ |= 0x00000001;
supported_ = value;
onChanged();
return this;
}
/**
*
* True if the Plugin supports the specified capabilities for the
* given volume. This field is REQUIRED.
*
*
* required bool supported = 1;
*/
public Builder clearSupported() {
bitField0_ = (bitField0_ & ~0x00000001);
supported_ = false;
onChanged();
return this;
}
// Either a java.lang.String or a ByteString (lazy UTF-8 decode on first read).
private java.lang.Object message_ = "";
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes were valid UTF-8.
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public Builder setMessage(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
message_ = value;
onChanged();
return this;
}
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public Builder clearMessage() {
bitField0_ = (bitField0_ & ~0x00000002);
message_ = getDefaultInstance().getMessage();
onChanged();
return this;
}
/**
*
* Message to the CO if `supported` above is false. This field is
* OPTIONAL.
* An empty string is equal to an unspecified field value.
*
*
* optional string message = 2;
*/
public Builder setMessageBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
message_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
// Singleton empty instance; also the seed for newBuilder()/toBuilder().
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated in generated code: callers should use parser() instead.
// NOTE(review): the Parser type argument appears stripped by the HTML
// scrape (raw Parser); the generator emits Parser<ValidateVolumeCapabilitiesResponse>.
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public ValidateVolumeCapabilitiesResponse parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
// Delegates to the parsing constructor, which tolerates missing
// required fields (hence "partial").
return new ValidateVolumeCapabilitiesResponse(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor contract shared by hadoop.yarn.VolumeCapability
// messages and their builders.
public interface VolumeCapabilityOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.VolumeCapability)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
*/
boolean hasVolumeType();
/**
* required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType getVolumeType();
/**
* required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
*/
boolean hasAccessMode();
/**
* required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode getAccessMode();
/**
* repeated string mount_flags = 3;
*/
// NOTE(review): generic argument appears stripped by the HTML scrape;
// the generator emits java.util.List<java.lang.String> here — confirm
// against the original generated source before relying on the raw type.
java.util.List
getMountFlagsList();
/**
* repeated string mount_flags = 3;
*/
int getMountFlagsCount();
/**
* repeated string mount_flags = 3;
*/
java.lang.String getMountFlags(int index);
/**
* repeated string mount_flags = 3;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMountFlagsBytes(int index);
}
/**
* Protobuf type {@code hadoop.yarn.VolumeCapability}
*/
public static final class VolumeCapability extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.VolumeCapability)
VolumeCapabilityOrBuilder {
private static final long serialVersionUID = 0L;
// Use VolumeCapability.newBuilder() to construct.
// NOTE(review): parameter type restored to Builder<?> — the scrape had
// stripped the "<?" leaving the syntactically invalid "Builder>".
private VolumeCapability(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default (empty-message) constructor: enum fields default to ordinal 0
// and the repeated mount_flags field to the shared empty list.
private VolumeCapability() {
volumeType_ = 0;
accessMode_ = 0;
mountFlags_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
// Fields with tags this generated class does not recognize are preserved here.
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
// routing unknown tags and unrecognized enum ordinals into unknownFields.
// NOTE(review): the catch clause type is restored to java.io.IOException;
// the shading pass had corrupted it to "java.org.apache.hadoop.shaded.io.IOException".
private VolumeCapability(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: { // field 1, varint: volume_type
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType value = org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.valueOf(rawValue);
if (value == null) {
// Unknown ordinal: keep it round-trippable in unknownFields.
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
volumeType_ = rawValue;
}
break;
}
case 16: { // field 2, varint: access_mode
int rawValue = input.readEnum();
@SuppressWarnings("deprecation")
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode value = org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
accessMode_ = rawValue;
}
break;
}
case 26: { // field 3, length-delimited: repeated mount_flags
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
// Allocate the mutable list lazily on the first occurrence.
if (!((mutable_bitField0_ & 0x00000004) != 0)) {
mountFlags_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000004;
}
mountFlags_.add(bs);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field and attach whatever was parsed, even on error.
if (((mutable_bitField0_ & 0x00000004) != 0)) {
mountFlags_ = mountFlags_.getUnmodifiableView();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor for the hadoop.yarn.VolumeCapability message type.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder.class);
}
/**
* Protobuf enum {@code hadoop.yarn.VolumeCapability.VolumeType}
*
* NOTE(review): the {@code <VolumeType>} type arguments on EnumLiteMap below
* are restored — the HTML scrape had stripped them, leaving raw types.
*/
public enum VolumeType
implements org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
* BLOCK = 0;
*/
BLOCK(0),
/**
* FILE_SYSTEM = 1;
*/
FILE_SYSTEM(1),
;
/**
* BLOCK = 0;
*/
public static final int BLOCK_VALUE = 0;
/**
* FILE_SYSTEM = 1;
*/
public static final int FILE_SYSTEM_VALUE = 1;
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static VolumeType valueOf(int value) {
return forNumber(value);
}
// Returns null (not an exception) for unrecognized wire values.
public static VolumeType forNumber(int value) {
switch (value) {
case 0: return BLOCK;
case 1: return FILE_SYSTEM;
default: return null;
}
}
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<VolumeType>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
VolumeType> internalValueMap =
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<VolumeType>() {
public VolumeType findValueByNumber(int number) {
return VolumeType.forNumber(number);
}
};
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
// VolumeType is the first nested enum of VolumeCapability.
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDescriptor().getEnumTypes().get(0);
}
private static final VolumeType[] VALUES = values();
public static VolumeType valueOf(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private VolumeType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.yarn.VolumeCapability.VolumeType)
}
/**
* Protobuf enum {@code hadoop.yarn.VolumeCapability.AccessMode}
*
* NOTE(review): the {@code <AccessMode>} type arguments on EnumLiteMap below
* are restored — the HTML scrape had stripped them, leaving raw types.
*/
public enum AccessMode
implements org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
/**
* UNKNOWN = 0;
*/
UNKNOWN(0),
/**
* SINGLE_NODE_WRITER = 1;
*/
SINGLE_NODE_WRITER(1),
/**
* SINGLE_NODE_READER_ONLY = 2;
*/
SINGLE_NODE_READER_ONLY(2),
/**
* MULTI_NODE_READER_ONLY = 3;
*/
MULTI_NODE_READER_ONLY(3),
/**
* MULTI_NODE_SINGLE_WRITER = 4;
*/
MULTI_NODE_SINGLE_WRITER(4),
/**
* MULTI_NODE_MULTI_WRITER = 5;
*/
MULTI_NODE_MULTI_WRITER(5),
;
/**
* UNKNOWN = 0;
*/
public static final int UNKNOWN_VALUE = 0;
/**
* SINGLE_NODE_WRITER = 1;
*/
public static final int SINGLE_NODE_WRITER_VALUE = 1;
/**
* SINGLE_NODE_READER_ONLY = 2;
*/
public static final int SINGLE_NODE_READER_ONLY_VALUE = 2;
/**
* MULTI_NODE_READER_ONLY = 3;
*/
public static final int MULTI_NODE_READER_ONLY_VALUE = 3;
/**
* MULTI_NODE_SINGLE_WRITER = 4;
*/
public static final int MULTI_NODE_SINGLE_WRITER_VALUE = 4;
/**
* MULTI_NODE_MULTI_WRITER = 5;
*/
public static final int MULTI_NODE_MULTI_WRITER_VALUE = 5;
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static AccessMode valueOf(int value) {
return forNumber(value);
}
// Returns null (not an exception) for unrecognized wire values.
public static AccessMode forNumber(int value) {
switch (value) {
case 0: return UNKNOWN;
case 1: return SINGLE_NODE_WRITER;
case 2: return SINGLE_NODE_READER_ONLY;
case 3: return MULTI_NODE_READER_ONLY;
case 4: return MULTI_NODE_SINGLE_WRITER;
case 5: return MULTI_NODE_MULTI_WRITER;
default: return null;
}
}
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AccessMode>
internalGetValueMap() {
return internalValueMap;
}
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
AccessMode> internalValueMap =
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AccessMode>() {
public AccessMode findValueByNumber(int number) {
return AccessMode.forNumber(number);
}
};
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
// AccessMode is the second nested enum of VolumeCapability.
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDescriptor().getEnumTypes().get(1);
}
private static final AccessMode[] VALUES = values();
public static AccessMode valueOf(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private AccessMode(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.yarn.VolumeCapability.AccessMode)
}
// Presence bits: bit 0 = volume_type, bit 1 = access_mode.
private int bitField0_;
public static final int VOLUME_TYPE_FIELD_NUMBER = 1;
// Stored as the raw wire ordinal, not the enum constant.
private int volumeType_;
/**
* required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
*/
public boolean hasVolumeType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType getVolumeType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType result = org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.valueOf(volumeType_);
// Unrecognized ordinals fall back to BLOCK (the first enum value).
return result == null ? org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.BLOCK : result;
}
public static final int ACCESS_MODE_FIELD_NUMBER = 2;
// Stored as the raw wire ordinal, not the enum constant.
private int accessMode_;
/**
* required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
*/
public boolean hasAccessMode() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode getAccessMode() {
@SuppressWarnings("deprecation")
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode result = org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.valueOf(accessMode_);
// Unrecognized ordinals fall back to UNKNOWN.
return result == null ? org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.UNKNOWN : result;
}
public static final int MOUNT_FLAGS_FIELD_NUMBER = 3;
// Immutable view after parsing; holds lazily-decoded UTF-8 strings.
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList mountFlags_;
/**
* repeated string mount_flags = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getMountFlagsList() {
return mountFlags_;
}
/**
* repeated string mount_flags = 3;
*/
public int getMountFlagsCount() {
return mountFlags_.size();
}
/**
* repeated string mount_flags = 3;
*/
public java.lang.String getMountFlags(int index) {
return mountFlags_.get(index);
}
/**
* repeated string mount_flags = 3;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMountFlagsBytes(int index) {
return mountFlags_.getByteString(index);
}
// Memoized tri-state: -1 = not computed, 0 = missing required field, 1 = ok.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// Both volume_type and access_mode are required fields.
if (!hasVolumeType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasAccessMode()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
// NOTE(review): throws clause restored to java.io.IOException; the shading
// pass had corrupted it to "java.org.apache.hadoop.shaded.io.IOException".
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, volumeType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, accessMode_);
}
for (int i = 0; i < mountFlags_.size(); i++) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, mountFlags_.getRaw(i));
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte length.
// NOTE(review): the shading pass corrupted "computeEnumSize" and
// "computeStringSizeNoTag" into "org.apache.hadoop.shaded.com.ute…";
// the method names are restored to what the protoc generator emits.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(1, volumeType_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeEnumSize(2, accessMode_);
}
{
int dataSize = 0;
for (int i = 0; i < mountFlags_.size(); i++) {
dataSize += computeStringSizeNoTag(mountFlags_.getRaw(i));
}
size += dataSize;
// One byte of tag overhead per repeated mount_flags entry (field 3).
size += 1 * getMountFlagsList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability) obj;
// Field presence must match before raw ordinal values are compared.
if (hasVolumeType() != other.hasVolumeType()) return false;
if (hasVolumeType()) {
if (volumeType_ != other.volumeType_) return false;
}
if (hasAccessMode() != other.hasAccessMode()) return false;
if (hasAccessMode()) {
if (accessMode_ != other.accessMode_) return false;
}
if (!getMountFlagsList()
.equals(other.getMountFlagsList())) return false;
// Unknown fields participate in equality as well.
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; a hash of 0 means "not yet computed" in this scheme.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Only set fields contribute, mirroring equals().
if (hasVolumeType()) {
hash = (37 * hash) + VOLUME_TYPE_FIELD_NUMBER;
hash = (53 * hash) + volumeType_;
}
if (hasAccessMode()) {
hash = (37 * hash) + ACCESS_MODE_FIELD_NUMBER;
hash = (53 * hash) + accessMode_;
}
if (getMountFlagsCount() > 0) {
hash = (37 * hash) + MOUNT_FLAGS_FIELD_NUMBER;
hash = (53 * hash) + getMountFlagsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse overloads for VolumeCapability over the supported input kinds.
// NOTE(review): "java.io.InputStream"/"java.io.IOException" restored in the
// stream overloads; the shading pass had corrupted them to
// "java.org.apache.hadoop.shaded.io.*".
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: all builders originate from DEFAULT_INSTANCE
// so that descriptor/accessor-table initialization has already happened.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the given prototype's fields.
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance maps to a fresh empty builder; any other
// instance is copied into the builder via mergeFrom.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Parent-aware builder used internally for nested-message field builders.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code hadoop.yarn.VolumeCapability}
 *
 * Builder for VolumeCapability. Holds mutable copies of the three fields
 * (volume_type, access_mode, mount_flags) plus a presence bit-field
 * (bitField0_), and assembles an immutable message in buildPartial().
 * NOTE(review): the only code change versus the generated original is
 * restoring the corrupted "java.org.apache.hadoop.shaded.io.IOException"
 * throws clause to java.io.IOException, which is what protoc emits.
 */
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.VolumeCapability)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields here, so there are no nested field builders to
// force-initialize; the hook is kept for generated-code uniformity.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets all fields to their defaults and clears the presence bits.
@java.lang.Override
public Builder clear() {
super.clear();
volumeType_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
accessMode_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
mountFlags_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance();
}
// Builds and verifies required fields, throwing if any are unset.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder state into a new message without checking required
// fields. Presence bits are transferred for volume_type/access_mode; the
// mount_flags list is frozen (unmodifiable view) and its "owned" bit is
// cleared so a later mutation of this builder copies the list first.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.volumeType_ = volumeType_;
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.accessMode_ = accessMode_;
if (((bitField0_ & 0x00000004) != 0)) {
mountFlags_ = mountFlags_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000004);
}
result.mountFlags_ = mountFlags_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
// The reflection-based mutators below simply delegate to the superclass;
// they are overridden only to narrow the return type to Builder.
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when possible; otherwise falls back
// to the generic reflective merge.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: set fields of 'other' overwrite ours; mount_flags are
// appended. If our list is empty we share other's (immutable) list and
// rely on copy-on-write via ensureMountFlagsIsMutable.
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance()) return this;
if (other.hasVolumeType()) {
setVolumeType(other.getVolumeType());
}
if (other.hasAccessMode()) {
setAccessMode(other.getAccessMode());
}
if (!other.mountFlags_.isEmpty()) {
if (mountFlags_.isEmpty()) {
mountFlags_ = other.mountFlags_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureMountFlagsIsMutable();
mountFlags_.addAll(other.mountFlags_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Both proto2 'required' fields must be present.
@java.lang.Override
public final boolean isInitialized() {
if (!hasVolumeType()) {
return false;
}
if (!hasAccessMode()) {
return false;
}
return true;
}
// Parses from a stream and merges the result; on a parse error the
// partially-read message (if any) is still merged before rethrowing.
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int volumeType_ = 0;
/**
 * required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
 */
public boolean hasVolumeType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
 * Unrecognized stored values fall back to BLOCK.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType getVolumeType() {
@SuppressWarnings("deprecation")
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType result = org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.valueOf(volumeType_);
return result == null ? org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.BLOCK : result;
}
/**
 * required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
 */
public Builder setVolumeType(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
volumeType_ = value.getNumber();
onChanged();
return this;
}
/**
 * required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;
 */
public Builder clearVolumeType() {
bitField0_ = (bitField0_ & ~0x00000001);
volumeType_ = 0;
onChanged();
return this;
}
private int accessMode_ = 0;
/**
 * required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
 */
public boolean hasAccessMode() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
 * Unrecognized stored values fall back to UNKNOWN.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode getAccessMode() {
@SuppressWarnings("deprecation")
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode result = org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.valueOf(accessMode_);
return result == null ? org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.UNKNOWN : result;
}
/**
 * required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
 */
public Builder setAccessMode(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
accessMode_ = value.getNumber();
onChanged();
return this;
}
/**
 * required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;
 */
public Builder clearAccessMode() {
bitField0_ = (bitField0_ & ~0x00000002);
accessMode_ = 0;
onChanged();
return this;
}
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringList mountFlags_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
// Copy-on-write guard: bit 0x4 set means this builder owns a mutable
// copy of the list; otherwise clone before the first mutation.
private void ensureMountFlagsIsMutable() {
if (!((bitField0_ & 0x00000004) != 0)) {
mountFlags_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(mountFlags_);
bitField0_ |= 0x00000004;
}
}
/**
 * repeated string mount_flags = 3;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getMountFlagsList() {
return mountFlags_.getUnmodifiableView();
}
/**
 * repeated string mount_flags = 3;
 */
public int getMountFlagsCount() {
return mountFlags_.size();
}
/**
 * repeated string mount_flags = 3;
 */
public java.lang.String getMountFlags(int index) {
return mountFlags_.get(index);
}
/**
 * repeated string mount_flags = 3;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getMountFlagsBytes(int index) {
return mountFlags_.getByteString(index);
}
/**
 * repeated string mount_flags = 3;
 */
public Builder setMountFlags(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMountFlagsIsMutable();
mountFlags_.set(index, value);
onChanged();
return this;
}
/**
 * repeated string mount_flags = 3;
 */
public Builder addMountFlags(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMountFlagsIsMutable();
mountFlags_.add(value);
onChanged();
return this;
}
/**
 * repeated string mount_flags = 3;
 */
public Builder addAllMountFlags(
java.lang.Iterable values) {
ensureMountFlagsIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, mountFlags_);
onChanged();
return this;
}
/**
 * repeated string mount_flags = 3;
 */
public Builder clearMountFlags() {
mountFlags_ = org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
 * repeated string mount_flags = 3;
 * Adds a raw, unvalidated UTF-8 byte string.
 */
public Builder addMountFlagsBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureMountFlagsIsMutable();
mountFlags_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.VolumeCapability)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.VolumeCapability)
// Singleton default (all-fields-unset) instance, created eagerly in the
// static initializer; toBuilder()/newBuilder() bootstrap from it.
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser; deprecated as a public field in favor of parser().
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public VolumeCapability parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
// Parsing happens in the message's stream constructor.
return new VolumeCapability(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Accessor interface for GetPluginInfoRequest. The message declares no
// fields, so the interface only inherits the generic MessageOrBuilder API.
public interface GetPluginInfoRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetPluginInfoRequest)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
*
* Intentionally empty.
*
*
* Protobuf type {@code hadoop.yarn.GetPluginInfoRequest}
*/
public static final class GetPluginInfoRequest extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetPluginInfoRequest)
GetPluginInfoRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetPluginInfoRequest.newBuilder() to construct.
// NOTE(review): the builder parameter had lost its wildcard during
// source mangling ("Builder>"); restored to the protoc-emitted
// GeneratedMessageV3.Builder<?> so the declaration parses.
private GetPluginInfoRequest(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetPluginInfoRequest() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Stream-parsing constructor. GetPluginInfoRequest defines no fields, so
// every tag read is either end-of-message (0) or preserved verbatim in
// the unknown-field set for forward compatibility.
// NOTE(review): restored the mangled "java.org.apache.hadoop.shaded.io.
// IOException" catch type to java.io.IOException.
private GetPluginInfoRequest(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protobuf parse exception.
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always attach whatever unknown fields were read, even on error.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor and reflective field-accessor plumbing for
// GetPluginInfoRequest, backed by file-level statics.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.Builder.class);
}
// Memoized initialization state: -1 unknown, 0 false, 1 true. With no
// required fields, the message is always initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serialization: with no declared fields, only unknown fields are
// written, and the size is just their serialized size (memoized).
// NOTE(review): restored the mangled throws clause to java.io.IOException.
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// With no declared fields, equality reduces to comparing unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash over the descriptor and unknown fields only; memoized (0 = unset).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for GetPluginInfoRequest, one per supported
// input form plus ExtensionRegistryLite overloads; all delegate to
// PARSER / GeneratedMessageV3 parse helpers.
// NOTE(review): restored the mangled "java.org.apache.hadoop.shaded.io.*"
// names (an illegal package) to plain java.io so these compile.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods; builders bootstrap from DEFAULT_INSTANCE.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated from the given prototype.
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Default instance maps to an empty builder; otherwise copy this in.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Parent-aware builder used internally for nested-message field builders.
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Intentionally empty.
*
*
* Protobuf type {@code hadoop.yarn.GetPluginInfoRequest}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetPluginInfoRequest)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequestOrBuilder {
// Descriptor and reflective field-accessor plumbing for the builder,
// shared with the message via the same file-level statics.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields, so there are no nested field builders to
// force-initialize; hook kept for generated-code uniformity.
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// The message has no fields, so clear() only resets superclass state.
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.getDefaultInstance();
}
// build() verifies initialization (trivially true here) before returning.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// No fields to copy; just construct from this builder and notify.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
// Reflective oneof clear; delegates to the superclass implementation.
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
// Reflective repeated-field element setter; delegates to the superclass.
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
// Reflective repeated-field append; delegates to the superclass.
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Merges another Message into this builder, dispatching to the typed
// overload when the argument is a GetPluginInfoRequest.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: the message has no declared fields, so only the other
// message's unknown fields are merged in.
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Always initialized: GetPluginInfoRequest declares no required fields.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
/**
 * Parses a GetPluginInfoRequest from the given stream and merges it into
 * this builder. On a parse failure, whatever was successfully read before
 * the error is still merged in (standard generated-builder semantics).
 *
 * @throws java.io.IOException if reading from the stream fails; an
 *     InvalidProtocolBufferException is unwrapped to its IO cause.
 */
@java.lang.Override
public Builder mergeFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    // Keep the partially-parsed message so the finally block can merge it.
    parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Replaces the builder's unknown fields; delegates to the superclass.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Merges the given unknown fields into the builder; delegates to the superclass.
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetPluginInfoRequest)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetPluginInfoRequest)
// Singleton default (empty) instance, created eagerly at class load.
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest();
}
// Accessor for the shared default instance.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Shared parser for GetPluginInfoRequest. Deprecated in generated code:
 * callers should use {@code parser()} instead of the raw field.
 *
 * <p>NOTE(review): the generic type arguments were stripped from this
 * declaration (HTML extraction loss); restored to the standard protoc form.
 */
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoRequest>
    PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetPluginInfoRequest>() {
  @java.lang.Override
  public GetPluginInfoRequest parsePartialFrom(
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return new GetPluginInfoRequest(input, extensionRegistry);
  }
};
/** Returns the parser for this message type (restored stripped generics). */
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoRequest> parser() {
  return PARSER;
}
/** Returns the parser for this message type (restored stripped generics). */
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoRequest> getParserForType() {
  return PARSER;
}
// Returns the shared default instance for this message type.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/**
 * Accessor interface for {@code hadoop.yarn.GetPluginInfoResponse},
 * implemented by both the message and its builder. Exposes the plugin
 * name and vendor version (both required string fields).
 */
public interface GetPluginInfoResponseOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.GetPluginInfoResponse)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string name = 1;
*/
boolean hasName();
/**
* required string name = 1;
*/
java.lang.String getName();
/**
* required string name = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getNameBytes();
/**
* required string vendor_version = 2;
*/
boolean hasVendorVersion();
/**
* required string vendor_version = 2;
*/
java.lang.String getVendorVersion();
/**
* required string vendor_version = 2;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVendorVersionBytes();
}
/**
* Protobuf type {@code hadoop.yarn.GetPluginInfoResponse}
*/
public static final class GetPluginInfoResponse extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.GetPluginInfoResponse)
GetPluginInfoResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetPluginInfoResponse.newBuilder() to construct.
private GetPluginInfoResponse(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private GetPluginInfoResponse() {
name_ = "";
vendorVersion_ = "";
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private GetPluginInfoResponse(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
name_ = bs;
break;
}
case 18: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
vendorVersion_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.org.apache.hadoop.shaded.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
* required string name = 1;
*/
public boolean hasName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string name = 1;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
}
}
/**
* required string name = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int VENDOR_VERSION_FIELD_NUMBER = 2;
private volatile java.lang.Object vendorVersion_;
/**
* required string vendor_version = 2;
*/
public boolean hasVendorVersion() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string vendor_version = 2;
*/
public java.lang.String getVendorVersion() {
java.lang.Object ref = vendorVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
vendorVersion_ = s;
}
return s;
}
}
/**
* required string vendor_version = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVendorVersionBytes() {
java.lang.Object ref = vendorVersion_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
vendorVersion_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasName()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasVendorVersion()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, vendorVersion_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.org.apache.hadoop.shaded.com.uteStringSize(1, name_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.org.apache.hadoop.shaded.com.uteStringSize(2, vendorVersion_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse) obj;
if (hasName() != other.hasName()) return false;
if (hasName()) {
if (!getName()
.equals(other.getName())) return false;
}
if (hasVendorVersion() != other.hasVendorVersion()) return false;
if (hasVendorVersion()) {
if (!getVendorVersion()
.equals(other.getVendorVersion())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasName()) {
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
}
if (hasVendorVersion()) {
hash = (37 * hash) + VENDOR_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVendorVersion().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.GetPluginInfoResponse}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.GetPluginInfoResponse)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponseOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
vendorVersion_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.name_ = name_;
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.vendorVersion_ = vendorVersion_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance()) return this;
if (other.hasName()) {
bitField0_ |= 0x00000001;
name_ = other.name_;
onChanged();
}
if (other.hasVendorVersion()) {
bitField0_ |= 0x00000002;
vendorVersion_ = other.vendorVersion_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasName()) {
return false;
}
if (!hasVendorVersion()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
* required string name = 1;
*/
public boolean hasName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string name = 1;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string name = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string name = 1;
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
/**
* required string name = 1;
*/
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000001);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* required string name = 1;
*/
public Builder setNameBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
private java.lang.Object vendorVersion_ = "";
/**
* required string vendor_version = 2;
*/
public boolean hasVendorVersion() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string vendor_version = 2;
*/
public java.lang.String getVendorVersion() {
java.lang.Object ref = vendorVersion_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
vendorVersion_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string vendor_version = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVendorVersionBytes() {
java.lang.Object ref = vendorVersion_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
vendorVersion_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string vendor_version = 2;
*/
public Builder setVendorVersion(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
vendorVersion_ = value;
onChanged();
return this;
}
/**
* required string vendor_version = 2;
*/
public Builder clearVendorVersion() {
bitField0_ = (bitField0_ & ~0x00000002);
vendorVersion_ = getDefaultInstance().getVendorVersion();
onChanged();
return this;
}
/**
* required string vendor_version = 2;
*/
public Builder setVendorVersionBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
vendorVersion_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.GetPluginInfoResponse)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.GetPluginInfoResponse)
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public GetPluginInfoResponse parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new GetPluginInfoResponse(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface NodePublishVolumeRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodePublishVolumeRequest)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string volume_id = 1;
*/
boolean hasVolumeId();
/**
* required string volume_id = 1;
*/
java.lang.String getVolumeId();
/**
* required string volume_id = 1;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes();
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
java.util.List
getPublishContextList();
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getPublishContext(int index);
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
int getPublishContextCount();
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getPublishContextOrBuilderList();
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getPublishContextOrBuilder(
int index);
/**
* optional string staging_target_path = 3;
*/
boolean hasStagingTargetPath();
/**
* optional string staging_target_path = 3;
*/
java.lang.String getStagingTargetPath();
/**
* optional string staging_target_path = 3;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getStagingTargetPathBytes();
/**
* required string target_path = 4;
*/
boolean hasTargetPath();
/**
* required string target_path = 4;
*/
java.lang.String getTargetPath();
/**
* required string target_path = 4;
*/
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getTargetPathBytes();
/**
* required .hadoop.yarn.VolumeCapability volume_capability = 5;
*/
boolean hasVolumeCapability();
/**
* required .hadoop.yarn.VolumeCapability volume_capability = 5;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapability();
/**
* required .hadoop.yarn.VolumeCapability volume_capability = 5;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilityOrBuilder();
/**
* required bool readonly = 6;
*/
boolean hasReadonly();
/**
* required bool readonly = 6;
*/
boolean getReadonly();
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
java.util.List
getSecretsList();
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getSecrets(int index);
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
int getSecretsCount();
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getSecretsOrBuilderList();
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getSecretsOrBuilder(
int index);
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
java.util.List
getVolumeContextList();
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeContext(int index);
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
int getVolumeContextCount();
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getVolumeContextOrBuilderList();
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeContextOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.yarn.NodePublishVolumeRequest}
*/
public static final class NodePublishVolumeRequest extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodePublishVolumeRequest)
NodePublishVolumeRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodePublishVolumeRequest.newBuilder() to construct.
// NOTE(review): restored the stripped wildcard type argument on Builder<?>.
private NodePublishVolumeRequest(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance: string fields start as
// "" and repeated fields as shared immutable empty lists (generated-code
// convention; avoids nulls).
private NodePublishVolumeRequest() {
volumeId_ = "";
publishContext_ = java.util.Collections.emptyList();
stagingTargetPath_ = "";
targetPath_ = "";
secrets_ = java.util.Collections.emptyList();
volumeContext_ = java.util.Collections.emptyList();
}
// Exposes fields that were present on the wire but not recognized by this
// message's schema; preserved for round-tripping.
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tagged fields from the input stream
// until EOF (tag 0) or an end-group tag, tracking presence in bitField0_ and
// repeated-field allocation in mutable_bitField0_.
// NOTE(review): this generated source was corrupted when rendered — the
// generic arguments on the ArrayList allocations and the java.io.IOException
// catch were mangled; restored here. No behavioral change.
private NodePublishVolumeRequest(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: {
          // field 1: volume_id (length-delimited); kept lazily as ByteString.
          org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
          bitField0_ |= 0x00000001;
          volumeId_ = bs;
          break;
        }
        case 18: {
          // field 2: publish_context (repeated message); list allocated on
          // first occurrence only.
          if (!((mutable_bitField0_ & 0x00000002) != 0)) {
            publishContext_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>();
            mutable_bitField0_ |= 0x00000002;
          }
          publishContext_.add(
              input.readMessage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER, extensionRegistry));
          break;
        }
        case 26: {
          // field 3: staging_target_path.
          org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
          bitField0_ |= 0x00000002;
          stagingTargetPath_ = bs;
          break;
        }
        case 34: {
          // field 4: target_path.
          org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
          bitField0_ |= 0x00000004;
          targetPath_ = bs;
          break;
        }
        case 42: {
          // field 5: volume_capability; repeated occurrences merge into the
          // previous value, per protobuf semantics for singular messages.
          org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder subBuilder = null;
          if (((bitField0_ & 0x00000008) != 0)) {
            subBuilder = volumeCapability_.toBuilder();
          }
          volumeCapability_ = input.readMessage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(volumeCapability_);
            volumeCapability_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000008;
          break;
        }
        case 48: {
          // field 6: readonly (varint bool).
          bitField0_ |= 0x00000010;
          readonly_ = input.readBool();
          break;
        }
        case 58: {
          // field 7: secrets (repeated message).
          if (!((mutable_bitField0_ & 0x00000040) != 0)) {
            secrets_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>();
            mutable_bitField0_ |= 0x00000040;
          }
          secrets_.add(
              input.readMessage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER, extensionRegistry));
          break;
        }
        case 66: {
          // field 8: volume_context (repeated message).
          if (!((mutable_bitField0_ & 0x00000080) != 0)) {
            volumeContext_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>();
            mutable_bitField0_ |= 0x00000080;
          }
          volumeContext_.add(
              input.readMessage(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER, extensionRegistry));
          break;
        }
        default: {
          // Unknown field: preserve it unless it signals end of the message.
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Freeze repeated fields (even on error, so the unfinished message is safe)
    // and finalize unknown fields.
    if (((mutable_bitField0_ & 0x00000002) != 0)) {
      publishContext_ = java.util.Collections.unmodifiableList(publishContext_);
    }
    if (((mutable_bitField0_ & 0x00000040) != 0)) {
      secrets_ = java.util.Collections.unmodifiableList(secrets_);
    }
    if (((mutable_bitField0_ & 0x00000080) != 0)) {
      volumeContext_ = java.util.Collections.unmodifiableList(volumeContext_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Message-level descriptor (schema metadata) for hadoop.yarn.NodePublishVolumeRequest.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
}
// Binds the descriptor's fields to this class and its Builder for reflective
// access (used by the generated runtime, not by application code).
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.Builder.class);
}
// Presence bitmask: bit 0x01=volume_id, 0x02=staging_target_path,
// 0x04=target_path, 0x08=volume_capability, 0x10=readonly.
private int bitField0_;
public static final int VOLUME_ID_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily (see getVolumeId).
private volatile java.lang.Object volumeId_;
/**
 * required string volume_id = 1;
 */
public boolean hasVolumeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required string volume_id = 1;
 * Lazily decodes the wire ByteString to a String; the decoded value is
 * cached only when it is valid UTF-8.
 */
public java.lang.String getVolumeId() {
java.lang.Object ref = volumeId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
volumeId_ = s;
}
return s;
}
}
/**
 * required string volume_id = 1;
 * ByteString view; caches the encoded form when the field currently holds a String.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes() {
java.lang.Object ref = volumeId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
volumeId_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int PUBLISH_CONTEXT_FIELD_NUMBER = 2;
private java.util.List publishContext_;
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
public java.util.List getPublishContextList() {
return publishContext_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getPublishContextOrBuilderList() {
return publishContext_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
public int getPublishContextCount() {
return publishContext_.size();
}
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getPublishContext(int index) {
return publishContext_.get(index);
}
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getPublishContextOrBuilder(
int index) {
return publishContext_.get(index);
}
public static final int STAGING_TARGET_PATH_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; decoded lazily like volumeId_.
private volatile java.lang.Object stagingTargetPath_;
/**
 * optional string staging_target_path = 3;
 */
public boolean hasStagingTargetPath() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * optional string staging_target_path = 3;
 * Lazily decodes the wire ByteString; caches only when valid UTF-8.
 */
public java.lang.String getStagingTargetPath() {
java.lang.Object ref = stagingTargetPath_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
stagingTargetPath_ = s;
}
return s;
}
}
/**
 * optional string staging_target_path = 3;
 * ByteString view; caches the encoded form when currently a String.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getStagingTargetPathBytes() {
java.lang.Object ref = stagingTargetPath_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
stagingTargetPath_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int TARGET_PATH_FIELD_NUMBER = 4;
// Holds either a String or a ByteString; decoded lazily like volumeId_.
private volatile java.lang.Object targetPath_;
/**
 * required string target_path = 4;
 */
public boolean hasTargetPath() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * required string target_path = 4;
 * Lazily decodes the wire ByteString; caches only when valid UTF-8.
 */
public java.lang.String getTargetPath() {
java.lang.Object ref = targetPath_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
targetPath_ = s;
}
return s;
}
}
/**
 * required string target_path = 4;
 * ByteString view; caches the encoded form when currently a String.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getTargetPathBytes() {
java.lang.Object ref = targetPath_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
targetPath_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int VOLUME_CAPABILITY_FIELD_NUMBER = 5;
// Null when unset; getters substitute the default instance so callers never see null.
private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability volumeCapability_;
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public boolean hasVolumeCapability() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapability() {
return volumeCapability_ == null ? org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilityOrBuilder() {
return volumeCapability_ == null ? org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
}
public static final int READONLY_FIELD_NUMBER = 6;
private boolean readonly_;
/**
 * required bool readonly = 6;
 */
public boolean hasReadonly() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
 * required bool readonly = 6;
 * Returns false when unset (field default); check hasReadonly() for presence.
 */
public boolean getReadonly() {
return readonly_;
}
public static final int SECRETS_FIELD_NUMBER = 7;
private java.util.List secrets_;
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
public java.util.List getSecretsList() {
return secrets_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getSecretsOrBuilderList() {
return secrets_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
public int getSecretsCount() {
return secrets_.size();
}
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getSecrets(int index) {
return secrets_.get(index);
}
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getSecretsOrBuilder(
int index) {
return secrets_.get(index);
}
public static final int VOLUME_CONTEXT_FIELD_NUMBER = 8;
private java.util.List volumeContext_;
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
public java.util.List getVolumeContextList() {
return volumeContext_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getVolumeContextOrBuilderList() {
return volumeContext_;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
public int getVolumeContextCount() {
return volumeContext_.size();
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeContext(int index) {
return volumeContext_.get(index);
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeContextOrBuilder(
int index) {
return volumeContext_.get(index);
}
// Memoized required-field check: -1 = not computed, 0 = missing required
// fields, 1 = fully initialized.
private byte memoizedIsInitialized = -1;
// Verifies all required fields are set (volume_id, target_path,
// volume_capability, readonly) and that the nested capability message is
// itself initialized; result is cached.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasVolumeId()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasTargetPath()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasVolumeCapability()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasReadonly()) {
memoizedIsInitialized = 0;
return false;
}
if (!getVolumeCapability().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes all set fields (in field-number order) followed by any unknown
// fields to the output stream.
// NOTE(review): the "throws java.io.IOException" clause was mangled by the
// shading rewrite in this rendered source; restored here. No behavioral change.
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, volumeId_);
  }
  for (int i = 0; i < publishContext_.size(); i++) {
    output.writeMessage(2, publishContext_.get(i));
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, stagingTargetPath_);
  }
  if (((bitField0_ & 0x00000004) != 0)) {
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, targetPath_);
  }
  if (((bitField0_ & 0x00000008) != 0)) {
    output.writeMessage(5, getVolumeCapability());
  }
  if (((bitField0_ & 0x00000010) != 0)) {
    output.writeBool(6, readonly_);
  }
  for (int i = 0; i < secrets_.size(); i++) {
    output.writeMessage(7, secrets_.get(i));
  }
  for (int i = 0; i < volumeContext_.size(); i++) {
    output.writeMessage(8, volumeContext_.get(i));
  }
  unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size; must mirror writeTo exactly.
// NOTE(review): the computeStringSize/computeMessageSize/computeBoolSize call
// names were mangled by the shading rewrite in this rendered source ("com" was
// rewritten inside identifiers); restored here. No behavioral change.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, volumeId_);
  }
  for (int i = 0; i < publishContext_.size(); i++) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeMessageSize(2, publishContext_.get(i));
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, stagingTargetPath_);
  }
  if (((bitField0_ & 0x00000004) != 0)) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, targetPath_);
  }
  if (((bitField0_ & 0x00000008) != 0)) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeMessageSize(5, getVolumeCapability());
  }
  if (((bitField0_ & 0x00000010) != 0)) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeBoolSize(6, readonly_);
  }
  for (int i = 0; i < secrets_.size(); i++) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeMessageSize(7, secrets_.get(i));
  }
  for (int i = 0; i < volumeContext_.size(); i++) {
    size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
      .computeMessageSize(8, volumeContext_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field structural equality: presence flags must match, then values,
// then unknown fields. Paired with hashCode() below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest) obj;
if (hasVolumeId() != other.hasVolumeId()) return false;
if (hasVolumeId()) {
if (!getVolumeId()
.equals(other.getVolumeId())) return false;
}
if (!getPublishContextList()
.equals(other.getPublishContextList())) return false;
if (hasStagingTargetPath() != other.hasStagingTargetPath()) return false;
if (hasStagingTargetPath()) {
if (!getStagingTargetPath()
.equals(other.getStagingTargetPath())) return false;
}
if (hasTargetPath() != other.hasTargetPath()) return false;
if (hasTargetPath()) {
if (!getTargetPath()
.equals(other.getTargetPath())) return false;
}
if (hasVolumeCapability() != other.hasVolumeCapability()) return false;
if (hasVolumeCapability()) {
if (!getVolumeCapability()
.equals(other.getVolumeCapability())) return false;
}
if (hasReadonly() != other.hasReadonly()) return false;
if (hasReadonly()) {
if (getReadonly()
!= other.getReadonly()) return false;
}
if (!getSecretsList()
.equals(other.getSecretsList())) return false;
if (!getVolumeContextList()
.equals(other.getVolumeContextList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash consistent with equals(): folds in each set field's number
// and value hash using the standard generated-code 37/53 multipliers.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasVolumeId()) {
hash = (37 * hash) + VOLUME_ID_FIELD_NUMBER;
hash = (53 * hash) + getVolumeId().hashCode();
}
if (getPublishContextCount() > 0) {
hash = (37 * hash) + PUBLISH_CONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getPublishContextList().hashCode();
}
if (hasStagingTargetPath()) {
hash = (37 * hash) + STAGING_TARGET_PATH_FIELD_NUMBER;
hash = (53 * hash) + getStagingTargetPath().hashCode();
}
if (hasTargetPath()) {
hash = (37 * hash) + TARGET_PATH_FIELD_NUMBER;
hash = (53 * hash) + getTargetPath().hashCode();
}
if (hasVolumeCapability()) {
hash = (37 * hash) + VOLUME_CAPABILITY_FIELD_NUMBER;
hash = (53 * hash) + getVolumeCapability().hashCode();
}
if (hasReadonly()) {
hash = (37 * hash) + READONLY_FIELD_NUMBER;
hash = (53 * hash) + org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
getReadonly());
}
if (getSecretsCount() > 0) {
hash = (37 * hash) + SECRETS_FIELD_NUMBER;
hash = (53 * hash) + getSecretsList().hashCode();
}
if (getVolumeContextCount() > 0) {
hash = (37 * hash) + VOLUME_CONTEXT_FIELD_NUMBER;
hash = (53 * hash) + getVolumeContextList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input form (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream), with and without an
// extension registry. Stream variants delegate to GeneratedMessageV3 helpers
// that translate parse failures into IOExceptions.
// NOTE(review): "java.io.InputStream"/"java.io.IOException" were mangled by
// the shading rewrite in this rendered source; restored here. No behavioral change.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    java.nio.ByteBuffer data)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    java.nio.ByteBuffer data,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(byte[] data)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    byte[] data,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: fresh builder, builder seeded from a prototype,
// and builder seeded from this instance. toBuilder() avoids a copy for the
// default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodePublishVolumeRequest}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodePublishVolumeRequest)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequestOrBuilder {
// Builder-side descriptor and reflective field-accessor table; mirrors the
// message-level accessors above.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.newBuilder()
// Standalone builder constructor.
private Builder() {
maybeForceBuilderInitialization();
}
// Nested-builder constructor: parent is notified when this builder changes.
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-field builders when the runtime requires it
// (alwaysUseFieldBuilders is a generated-runtime flag, normally false).
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getPublishContextFieldBuilder();
getVolumeCapabilityFieldBuilder();
getSecretsFieldBuilder();
getVolumeContextFieldBuilder();
}
}
// Resets every field to its default and clears all presence bits. Note the
// Builder uses its own bit layout (0x01=volume_id, 0x02=publish_context,
// 0x04=staging_target_path, 0x08=target_path, 0x10=volume_capability,
// 0x20=readonly, 0x40=secrets, 0x80=volume_context), which differs from the
// message's bitField0_ layout.
@java.lang.Override
public Builder clear() {
super.clear();
volumeId_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (publishContextBuilder_ == null) {
publishContext_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
publishContextBuilder_.clear();
}
stagingTargetPath_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
targetPath_ = "";
bitField0_ = (bitField0_ & ~0x00000008);
if (volumeCapabilityBuilder_ == null) {
volumeCapability_ = null;
} else {
volumeCapabilityBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
readonly_ = false;
bitField0_ = (bitField0_ & ~0x00000020);
if (secretsBuilder_ == null) {
secrets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
} else {
secretsBuilder_.clear();
}
if (volumeContextBuilder_ == null) {
volumeContext_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000080);
} else {
volumeContextBuilder_.clear();
}
return this;
}
// Builder-side descriptor/default-instance accessors required by the
// MessageOrBuilder contract.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Build the message without checking required fields. Copies each field from
// the builder into the result, translating the builder's has-bits
// (from_bitField0_) into the message's more compact has-bit layout
// (to_bitField0_); repeated fields are either frozen in place (builder path)
// or delegated to their RepeatedFieldBuilderV3.
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.volumeId_ = volumeId_;
// publish_context (repeated): wrap the local list unmodifiable once and
// clear its mutability bit so later builder edits copy-on-write.
if (publishContextBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
publishContext_ = java.util.Collections.unmodifiableList(publishContext_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.publishContext_ = publishContext_;
} else {
result.publishContext_ = publishContextBuilder_.build();
}
// Builder bit 0x04 maps to message bit 0x02, 0x08 -> 0x04, etc., because
// repeated fields occupy builder bits but need no message has-bit.
if (((from_bitField0_ & 0x00000004) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.stagingTargetPath_ = stagingTargetPath_;
if (((from_bitField0_ & 0x00000008) != 0)) {
to_bitField0_ |= 0x00000004;
}
result.targetPath_ = targetPath_;
if (((from_bitField0_ & 0x00000010) != 0)) {
if (volumeCapabilityBuilder_ == null) {
result.volumeCapability_ = volumeCapability_;
} else {
result.volumeCapability_ = volumeCapabilityBuilder_.build();
}
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.readonly_ = readonly_;
to_bitField0_ |= 0x00000010;
}
if (secretsBuilder_ == null) {
if (((bitField0_ & 0x00000040) != 0)) {
secrets_ = java.util.Collections.unmodifiableList(secrets_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.secrets_ = secrets_;
} else {
result.secrets_ = secretsBuilder_.build();
}
if (volumeContextBuilder_ == null) {
if (((bitField0_ & 0x00000080) != 0)) {
volumeContext_ = java.util.Collections.unmodifiableList(volumeContext_);
bitField0_ = (bitField0_ & ~0x00000080);
}
result.volumeContext_ = volumeContext_;
} else {
result.volumeContext_ = volumeContextBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Boilerplate overrides below simply delegate to GeneratedMessageV3.Builder;
// they exist so the generated class returns the concrete Builder type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatch to the type-specific mergeFrom when possible; otherwise fall back
// to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merge all set fields of {@code other} into this builder. Singular fields
// are overwritten when present in {@code other}; repeated fields are appended.
// For repeated fields the merge strategy depends on whether a field builder
// is active: without one the lists are shared/appended directly; with one the
// messages are added to the builder (or the builder is rebuilt when empty).
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.getDefaultInstance()) return this;
if (other.hasVolumeId()) {
bitField0_ |= 0x00000001;
volumeId_ = other.volumeId_;
onChanged();
}
if (publishContextBuilder_ == null) {
if (!other.publishContext_.isEmpty()) {
if (publishContext_.isEmpty()) {
// Adopt other's (immutable) list directly; clearing the mutability
// bit forces a copy before any future modification.
publishContext_ = other.publishContext_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensurePublishContextIsMutable();
publishContext_.addAll(other.publishContext_);
}
onChanged();
}
} else {
if (!other.publishContext_.isEmpty()) {
if (publishContextBuilder_.isEmpty()) {
publishContextBuilder_.dispose();
publishContextBuilder_ = null;
publishContext_ = other.publishContext_;
bitField0_ = (bitField0_ & ~0x00000002);
publishContextBuilder_ =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getPublishContextFieldBuilder() : null;
} else {
publishContextBuilder_.addAllMessages(other.publishContext_);
}
}
}
if (other.hasStagingTargetPath()) {
bitField0_ |= 0x00000004;
stagingTargetPath_ = other.stagingTargetPath_;
onChanged();
}
if (other.hasTargetPath()) {
bitField0_ |= 0x00000008;
targetPath_ = other.targetPath_;
onChanged();
}
if (other.hasVolumeCapability()) {
mergeVolumeCapability(other.getVolumeCapability());
}
if (other.hasReadonly()) {
setReadonly(other.getReadonly());
}
if (secretsBuilder_ == null) {
if (!other.secrets_.isEmpty()) {
if (secrets_.isEmpty()) {
secrets_ = other.secrets_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureSecretsIsMutable();
secrets_.addAll(other.secrets_);
}
onChanged();
}
} else {
if (!other.secrets_.isEmpty()) {
if (secretsBuilder_.isEmpty()) {
secretsBuilder_.dispose();
secretsBuilder_ = null;
secrets_ = other.secrets_;
bitField0_ = (bitField0_ & ~0x00000040);
secretsBuilder_ =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getSecretsFieldBuilder() : null;
} else {
secretsBuilder_.addAllMessages(other.secrets_);
}
}
}
if (volumeContextBuilder_ == null) {
if (!other.volumeContext_.isEmpty()) {
if (volumeContext_.isEmpty()) {
volumeContext_ = other.volumeContext_;
bitField0_ = (bitField0_ & ~0x00000080);
} else {
ensureVolumeContextIsMutable();
volumeContext_.addAll(other.volumeContext_);
}
onChanged();
}
} else {
if (!other.volumeContext_.isEmpty()) {
if (volumeContextBuilder_.isEmpty()) {
volumeContextBuilder_.dispose();
volumeContextBuilder_ = null;
volumeContext_ = other.volumeContext_;
bitField0_ = (bitField0_ & ~0x00000080);
volumeContextBuilder_ =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getVolumeContextFieldBuilder() : null;
} else {
volumeContextBuilder_.addAllMessages(other.volumeContext_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// True only when every proto2 "required" field is set (volume_id,
// target_path, volume_capability, readonly) and the nested volume_capability
// message is itself fully initialized.
@java.lang.Override
public final boolean isInitialized() {
if (!hasVolumeId()) {
return false;
}
if (!hasTargetPath()) {
return false;
}
if (!hasVolumeCapability()) {
return false;
}
if (!hasReadonly()) {
return false;
}
if (!getVolumeCapability().isInitialized()) {
return false;
}
return true;
}
/**
 * Parses a NodePublishVolumeRequest from the wire and merges it into this
 * builder. If parsing fails with an InvalidProtocolBufferException, the
 * partially parsed message (if any) is still merged in the finally block
 * before the exception is rethrown as an IOException.
 *
 * Fix: the shaded-source listing had mangled the throws clause into the
 * nonexistent type {@code java.org.apache.hadoop.shaded.io.IOException};
 * restored the correct {@code java.io.IOException}.
 */
@java.lang.Override
public Builder mergeFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was parsed before the failure so the caller can inspect it.
    parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Tracks which fields have been explicitly set (presence bits) and, for
// repeated fields, whether the local list is privately mutable.
private int bitField0_;
// volume_id: either a String or a lazily-decoded ByteString.
private java.lang.Object volumeId_ = "";
/**
 * required string volume_id = 1;
 */
public boolean hasVolumeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * required string volume_id = 1;
 *
 * Decodes a ByteString-backed value to UTF-8 on first access and caches the
 * String form only when it is valid UTF-8.
 */
public java.lang.String getVolumeId() {
java.lang.Object ref = volumeId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
volumeId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * required string volume_id = 1;
 *
 * Returns the raw bytes, caching the encoded form when currently a String.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes() {
java.lang.Object ref = volumeId_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
volumeId_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * required string volume_id = 1;
 */
public Builder setVolumeId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
volumeId_ = value;
onChanged();
return this;
}
/**
 * required string volume_id = 1;
 */
public Builder clearVolumeId() {
bitField0_ = (bitField0_ & ~0x00000001);
volumeId_ = getDefaultInstance().getVolumeId();
onChanged();
return this;
}
/**
 * required string volume_id = 1;
 */
public Builder setVolumeIdBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
volumeId_ = value;
onChanged();
return this;
}
// Backing list for publish_context. Restored the generic type arguments that
// were stripped from this source listing (raw List/ArrayList otherwise).
private java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> publishContext_ =
  java.util.Collections.emptyList();
// Copy-on-write: switch to a privately owned ArrayList the first time the
// list is modified; bit 0x02 tracks mutability of the local list.
private void ensurePublishContextIsMutable() {
  if (!((bitField0_ & 0x00000002) != 0)) {
    publishContext_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(publishContext_);
    bitField0_ |= 0x00000002;
  }
}
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> publishContextBuilder_;
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getPublishContextList() {
  if (publishContextBuilder_ == null) {
    return java.util.Collections.unmodifiableList(publishContext_);
  } else {
    return publishContextBuilder_.getMessageList();
  }
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public int getPublishContextCount() {
if (publishContextBuilder_ == null) {
return publishContext_.size();
} else {
return publishContextBuilder_.getCount();
}
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getPublishContext(int index) {
if (publishContextBuilder_ == null) {
return publishContext_.get(index);
} else {
return publishContextBuilder_.getMessage(index);
}
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 *
 * All mutators below follow the same pattern: operate on the local list
 * (after ensuring mutability) when no field builder exists, otherwise
 * delegate to the RepeatedFieldBuilderV3.
 */
public Builder setPublishContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (publishContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePublishContextIsMutable();
publishContext_.set(index, value);
onChanged();
} else {
publishContextBuilder_.setMessage(index, value);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public Builder setPublishContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (publishContextBuilder_ == null) {
ensurePublishContextIsMutable();
publishContext_.set(index, builderForValue.build());
onChanged();
} else {
publishContextBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public Builder addPublishContext(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (publishContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePublishContextIsMutable();
publishContext_.add(value);
onChanged();
} else {
publishContextBuilder_.addMessage(value);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public Builder addPublishContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (publishContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePublishContextIsMutable();
publishContext_.add(index, value);
onChanged();
} else {
publishContextBuilder_.addMessage(index, value);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public Builder addPublishContext(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (publishContextBuilder_ == null) {
ensurePublishContextIsMutable();
publishContext_.add(builderForValue.build());
onChanged();
} else {
publishContextBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public Builder addPublishContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (publishContextBuilder_ == null) {
ensurePublishContextIsMutable();
publishContext_.add(index, builderForValue.build());
onChanged();
} else {
publishContextBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
public Builder addAllPublishContext(
java.lang.Iterable extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
if (publishContextBuilder_ == null) {
ensurePublishContextIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, publishContext_);
onChanged();
} else {
publishContextBuilder_.addAllMessages(values);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public Builder clearPublishContext() {
if (publishContextBuilder_ == null) {
publishContext_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
publishContextBuilder_.clear();
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public Builder removePublishContext(int index) {
if (publishContextBuilder_ == null) {
ensurePublishContextIsMutable();
publishContext_.remove(index);
onChanged();
} else {
publishContextBuilder_.remove(index);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 *
 * Returns a mutable sub-builder for the element at {@code index}; forces
 * creation of the field builder.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getPublishContextBuilder(
int index) {
return getPublishContextFieldBuilder().getBuilder(index);
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getPublishContextOrBuilder(
int index) {
if (publishContextBuilder_ == null) {
return publishContext_.get(index); } else {
return publishContextBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getPublishContextOrBuilderList() {
if (publishContextBuilder_ != null) {
return publishContextBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(publishContext_);
}
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 *
 * Appends a new default-valued element and returns its sub-builder.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addPublishContextBuilder() {
return getPublishContextFieldBuilder().addBuilder(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addPublishContextBuilder(
int index) {
return getPublishContextFieldBuilder().addBuilder(
index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * repeated .hadoop.yarn.StringStringMapProto publish_context = 2;
 *
 * Restored the generic element type ({@code List<...Builder>}) lost in this
 * source listing.
 */
public java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder>
    getPublishContextBuilderList() {
  return getPublishContextFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder for publish_context, seeding it
// with the current list, and nulls out the local list (the builder becomes
// the single source of truth from then on).
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getPublishContextFieldBuilder() {
if (publishContextBuilder_ == null) {
publishContextBuilder_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
publishContext_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
publishContext_ = null;
}
return publishContextBuilder_;
}
// staging_target_path: String or lazily-decoded ByteString.
private java.lang.Object stagingTargetPath_ = "";
/**
 * optional string staging_target_path = 3;
 */
public boolean hasStagingTargetPath() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * optional string staging_target_path = 3;
 *
 * Decodes a ByteString value to UTF-8 on first access, caching the String
 * form only when valid UTF-8.
 */
public java.lang.String getStagingTargetPath() {
java.lang.Object ref = stagingTargetPath_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
stagingTargetPath_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * optional string staging_target_path = 3;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getStagingTargetPathBytes() {
java.lang.Object ref = stagingTargetPath_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
stagingTargetPath_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * optional string staging_target_path = 3;
 */
public Builder setStagingTargetPath(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
stagingTargetPath_ = value;
onChanged();
return this;
}
/**
 * optional string staging_target_path = 3;
 */
public Builder clearStagingTargetPath() {
bitField0_ = (bitField0_ & ~0x00000004);
stagingTargetPath_ = getDefaultInstance().getStagingTargetPath();
onChanged();
return this;
}
/**
 * optional string staging_target_path = 3;
 */
public Builder setStagingTargetPathBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
stagingTargetPath_ = value;
onChanged();
return this;
}
// target_path: String or lazily-decoded ByteString.
private java.lang.Object targetPath_ = "";
/**
 * required string target_path = 4;
 */
public boolean hasTargetPath() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * required string target_path = 4;
 */
public java.lang.String getTargetPath() {
java.lang.Object ref = targetPath_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
targetPath_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * required string target_path = 4;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getTargetPathBytes() {
java.lang.Object ref = targetPath_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
targetPath_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
 * required string target_path = 4;
 */
public Builder setTargetPath(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
targetPath_ = value;
onChanged();
return this;
}
/**
 * required string target_path = 4;
 */
public Builder clearTargetPath() {
bitField0_ = (bitField0_ & ~0x00000008);
targetPath_ = getDefaultInstance().getTargetPath();
onChanged();
return this;
}
/**
 * required string target_path = 4;
 */
public Builder setTargetPathBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
targetPath_ = value;
onChanged();
return this;
}
// volume_capability singular message field; either held directly or managed
// by a SingleFieldBuilderV3 once a sub-builder has been requested.
private org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability volumeCapability_;
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> volumeCapabilityBuilder_;
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public boolean hasVolumeCapability() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 *
 * Never returns null; falls back to the default instance when unset.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapability() {
if (volumeCapabilityBuilder_ == null) {
return volumeCapability_ == null ? org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
} else {
return volumeCapabilityBuilder_.getMessage();
}
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public Builder setVolumeCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
if (volumeCapabilityBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
volumeCapability_ = value;
onChanged();
} else {
volumeCapabilityBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public Builder setVolumeCapability(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
if (volumeCapabilityBuilder_ == null) {
volumeCapability_ = builderForValue.build();
onChanged();
} else {
volumeCapabilityBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 *
 * Field-merges {@code value} into any existing non-default capability;
 * otherwise just adopts {@code value}.
 */
public Builder mergeVolumeCapability(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
if (volumeCapabilityBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0) &&
volumeCapability_ != null &&
volumeCapability_ != org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance()) {
volumeCapability_ =
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.newBuilder(volumeCapability_).mergeFrom(value).buildPartial();
} else {
volumeCapability_ = value;
}
onChanged();
} else {
volumeCapabilityBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
return this;
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public Builder clearVolumeCapability() {
if (volumeCapabilityBuilder_ == null) {
volumeCapability_ = null;
onChanged();
} else {
volumeCapabilityBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 *
 * Marks the field set (the sub-builder may be mutated directly).
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder getVolumeCapabilityBuilder() {
bitField0_ |= 0x00000010;
onChanged();
return getVolumeCapabilityFieldBuilder().getBuilder();
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilityOrBuilder() {
if (volumeCapabilityBuilder_ != null) {
return volumeCapabilityBuilder_.getMessageOrBuilder();
} else {
return volumeCapability_ == null ?
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
}
}
/**
 * required .hadoop.yarn.VolumeCapability volume_capability = 5;
 *
 * Lazily creates the single-field builder, seeding it with the current
 * value; the local reference is nulled afterwards.
 */
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>
getVolumeCapabilityFieldBuilder() {
if (volumeCapabilityBuilder_ == null) {
volumeCapabilityBuilder_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>(
getVolumeCapability(),
getParentForChildren(),
isClean());
volumeCapability_ = null;
}
return volumeCapabilityBuilder_;
}
// readonly flag (required bool, field 6).
private boolean readonly_ ;
/**
 * required bool readonly = 6;
 */
public boolean hasReadonly() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
 * required bool readonly = 6;
 */
public boolean getReadonly() {
return readonly_;
}
/**
 * required bool readonly = 6;
 */
public Builder setReadonly(boolean value) {
bitField0_ |= 0x00000020;
readonly_ = value;
onChanged();
return this;
}
/**
 * required bool readonly = 6;
 */
public Builder clearReadonly() {
bitField0_ = (bitField0_ & ~0x00000020);
readonly_ = false;
onChanged();
return this;
}
// Backing list for secrets. Restored the generic type arguments that were
// stripped from this source listing (raw List/ArrayList otherwise).
private java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> secrets_ =
  java.util.Collections.emptyList();
// Copy-on-write: switch to a privately owned ArrayList the first time the
// list is modified; bit 0x40 tracks mutability of the local list.
private void ensureSecretsIsMutable() {
  if (!((bitField0_ & 0x00000040) != 0)) {
    secrets_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(secrets_);
    bitField0_ |= 0x00000040;
  }
}
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> secretsBuilder_;
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getSecretsList() {
  if (secretsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(secrets_);
  } else {
    return secretsBuilder_.getMessageList();
  }
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public int getSecretsCount() {
if (secretsBuilder_ == null) {
return secrets_.size();
} else {
return secretsBuilder_.getCount();
}
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getSecrets(int index) {
if (secretsBuilder_ == null) {
return secrets_.get(index);
} else {
return secretsBuilder_.getMessage(index);
}
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 *
 * Mutators below operate on the local list (after ensuring mutability) when
 * no field builder exists, otherwise delegate to the RepeatedFieldBuilderV3.
 */
public Builder setSecrets(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (secretsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSecretsIsMutable();
secrets_.set(index, value);
onChanged();
} else {
secretsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public Builder setSecrets(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (secretsBuilder_ == null) {
ensureSecretsIsMutable();
secrets_.set(index, builderForValue.build());
onChanged();
} else {
secretsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public Builder addSecrets(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (secretsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSecretsIsMutable();
secrets_.add(value);
onChanged();
} else {
secretsBuilder_.addMessage(value);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public Builder addSecrets(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (secretsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSecretsIsMutable();
secrets_.add(index, value);
onChanged();
} else {
secretsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public Builder addSecrets(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (secretsBuilder_ == null) {
ensureSecretsIsMutable();
secrets_.add(builderForValue.build());
onChanged();
} else {
secretsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * repeated .hadoop.yarn.StringStringMapProto secrets = 7;
 */
public Builder addSecrets(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (secretsBuilder_ == null) {
ensureSecretsIsMutable();
secrets_.add(index, builderForValue.build());
onChanged();
} else {
secretsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
public Builder addAllSecrets(
java.lang.Iterable extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
if (secretsBuilder_ == null) {
ensureSecretsIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, secrets_);
onChanged();
} else {
secretsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
 *
 * Empties the secrets list and clears its presence bit (0x40).
 */
public Builder clearSecrets() {
if (secretsBuilder_ == null) {
secrets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
onChanged();
} else {
secretsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
 *
 * Removes the element at {@code index} from the secrets list.
 */
public Builder removeSecrets(int index) {
if (secretsBuilder_ == null) {
ensureSecretsIsMutable();
secrets_.remove(index);
onChanged();
} else {
secretsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
 *
 * Returns a mutable sub-builder for the element at {@code index};
 * forces the field into builder mode.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getSecretsBuilder(
int index) {
return getSecretsFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
 *
 * Read-only view of the element at {@code index} (message or builder).
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getSecretsOrBuilder(
int index) {
if (secretsBuilder_ == null) {
return secrets_.get(index); } else {
return secretsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto secrets = 7;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getSecretsOrBuilderList() {
if (secretsBuilder_ != null) {
return secretsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(secrets_);
}
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
 *
 * Appends a new default-valued element and returns its builder.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addSecretsBuilder() {
return getSecretsFieldBuilder().addBuilder(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
 *
 * Inserts a new default-valued element at {@code index} and returns its builder.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addSecretsBuilder(
int index) {
return getSecretsFieldBuilder().addBuilder(
index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
 *
 * Mutable builder list for the secrets field. Restored the stripped
 * {@code List<...Builder>} return type from the protoc template.
 */
public java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder>
    getSecretsBuilderList() {
  return getSecretsFieldBuilder().getBuilderList();
}
/**
 * Lazily creates the {@code RepeatedFieldBuilderV3} for secrets, handing it
 * the current list and the presence bit (0x40); after this the list field is
 * nulled out and the builder owns the data.
 */
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getSecretsFieldBuilder() {
if (secretsBuilder_ == null) {
secretsBuilder_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
secrets_,
((bitField0_ & 0x00000040) != 0),
getParentForChildren(),
isClean());
secrets_ = null;
}
return secretsBuilder_;
}
// Backing list for volume_context (field 8); presence tracked by bit 0x80 of
// bitField0_. Restored the element type parameter stripped during extraction.
private java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> volumeContext_ =
  java.util.Collections.emptyList();
// Copy-on-write: switch the shared immutable list to a private ArrayList
// before the first mutation.
private void ensureVolumeContextIsMutable() {
  if (!((bitField0_ & 0x00000080) != 0)) {
    volumeContext_ = new java.util.ArrayList<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(volumeContext_);
    bitField0_ |= 0x00000080;
  }
}
// Lazily-created nested-builder support for volume_context; null until
// getVolumeContextFieldBuilder() is first called.
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
    org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> volumeContextBuilder_;
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Unmodifiable view of the volume_context list. Restored the stripped
 * {@code List<...>} return type from the protoc template.
 */
public java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getVolumeContextList() {
  if (volumeContextBuilder_ == null) {
    return java.util.Collections.unmodifiableList(volumeContext_);
  } else {
    return volumeContextBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Number of volume_context entries.
 */
public int getVolumeContextCount() {
if (volumeContextBuilder_ == null) {
return volumeContext_.size();
} else {
return volumeContextBuilder_.getCount();
}
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Element at {@code index}, from the list or the nested builder.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeContext(int index) {
if (volumeContextBuilder_ == null) {
return volumeContext_.get(index);
} else {
return volumeContextBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Replaces the element at {@code index} with {@code value}; rejects null.
 */
public Builder setVolumeContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (volumeContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeContextIsMutable();
volumeContext_.set(index, value);
onChanged();
} else {
volumeContextBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Replaces the element at {@code index} with the built {@code builderForValue}.
 */
public Builder setVolumeContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (volumeContextBuilder_ == null) {
ensureVolumeContextIsMutable();
volumeContext_.set(index, builderForValue.build());
onChanged();
} else {
volumeContextBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Appends {@code value}; rejects null.
 */
public Builder addVolumeContext(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (volumeContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeContextIsMutable();
volumeContext_.add(value);
onChanged();
} else {
volumeContextBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Inserts {@code value} at {@code index}; rejects null.
 */
public Builder addVolumeContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
if (volumeContextBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVolumeContextIsMutable();
volumeContext_.add(index, value);
onChanged();
} else {
volumeContextBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Appends the message built from {@code builderForValue}.
 */
public Builder addVolumeContext(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (volumeContextBuilder_ == null) {
ensureVolumeContextIsMutable();
volumeContext_.add(builderForValue.build());
onChanged();
} else {
volumeContextBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Inserts the message built from {@code builderForValue} at {@code index}.
 */
public Builder addVolumeContext(
int index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
if (volumeContextBuilder_ == null) {
ensureVolumeContextIsMutable();
volumeContext_.add(index, builderForValue.build());
onChanged();
} else {
volumeContextBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
public Builder addAllVolumeContext(
java.lang.Iterable extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
if (volumeContextBuilder_ == null) {
ensureVolumeContextIsMutable();
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, volumeContext_);
onChanged();
} else {
volumeContextBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Empties the volume_context list and clears its presence bit (0x80).
 */
public Builder clearVolumeContext() {
if (volumeContextBuilder_ == null) {
volumeContext_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000080);
onChanged();
} else {
volumeContextBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Removes the element at {@code index}.
 */
public Builder removeVolumeContext(int index) {
if (volumeContextBuilder_ == null) {
ensureVolumeContextIsMutable();
volumeContext_.remove(index);
onChanged();
} else {
volumeContextBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Returns a mutable sub-builder for the element at {@code index};
 * forces the field into builder mode.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getVolumeContextBuilder(
int index) {
return getVolumeContextFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Read-only view of the element at {@code index} (message or builder).
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeContextOrBuilder(
int index) {
if (volumeContextBuilder_ == null) {
return volumeContext_.get(index); } else {
return volumeContextBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.yarn.StringStringMapProto volume_context = 8;
*/
public java.util.List extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getVolumeContextOrBuilderList() {
if (volumeContextBuilder_ != null) {
return volumeContextBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(volumeContext_);
}
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Appends a new default-valued element and returns its builder.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeContextBuilder() {
return getVolumeContextFieldBuilder().addBuilder(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Inserts a new default-valued element at {@code index} and returns its builder.
 */
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeContextBuilder(
int index) {
return getVolumeContextFieldBuilder().addBuilder(
index, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
}
/**
 * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
 *
 * Mutable builder list for the volume_context field. Restored the stripped
 * {@code List<...Builder>} return type.
 */
public java.util.List<org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder>
    getVolumeContextBuilderList() {
  return getVolumeContextFieldBuilder().getBuilderList();
}
/**
 * Lazily creates the {@code RepeatedFieldBuilderV3} for volume_context, handing
 * it the current list and the presence bit (0x80); after this the list field is
 * nulled out and the builder owns the data.
 */
private org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>
getVolumeContextFieldBuilder() {
if (volumeContextBuilder_ == null) {
volumeContextBuilder_ = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
volumeContext_,
((bitField0_ & 0x00000080) != 0),
getParentForChildren(),
isClean());
volumeContext_ = null;
}
return volumeContextBuilder_;
}
// Delegates unknown-field handling to the generated-message superclass;
// declared final so subclasses of this Builder cannot alter the semantics.
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodePublishVolumeRequest)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodePublishVolumeRequest)
// Singleton default instance and parser plumbing for NodePublishVolumeRequest.
// Restored the <NodePublishVolumeRequest> type arguments on Parser and
// AbstractParser that were stripped during extraction.
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest();
}
/** Returns the shared immutable default (all-unset) instance. */
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeRequest>
    PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodePublishVolumeRequest>() {
  @java.lang.Override
  public NodePublishVolumeRequest parsePartialFrom(
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return new NodePublishVolumeRequest(input, extensionRegistry);
  }
};
/** Preferred accessor for the message parser. */
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeRequest> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Accessor interface for NodePublishVolumeResponse; the message carries no
// fields, so only the base MessageOrBuilder contract applies.
public interface NodePublishVolumeResponseOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodePublishVolumeResponse)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
*
* Intentionally empty.
*
*
* Protobuf type {@code hadoop.yarn.NodePublishVolumeResponse}
*/
public static final class NodePublishVolumeResponse extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodePublishVolumeResponse)
NodePublishVolumeResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodePublishVolumeResponse.newBuilder() to construct.
private NodePublishVolumeResponse(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private NodePublishVolumeResponse() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private NodePublishVolumeResponse(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.org.apache.hadoop.shaded.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Intentionally empty.
*
*
* Protobuf type {@code hadoop.yarn.NodePublishVolumeResponse}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodePublishVolumeResponse)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponseOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodePublishVolumeResponse)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodePublishVolumeResponse)
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Shared parser singleton for {@code NodePublishVolumeResponse}.
 * Deprecated in favour of {@link #parser()}, but kept public for
 * backward compatibility with older generated-code callers.
 * (The generic type arguments below were restored; the published source
 * rendering had stripped them, which does not compile.)
 */
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeResponse>
    PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodePublishVolumeResponse>() {
  @java.lang.Override
  public NodePublishVolumeResponse parsePartialFrom(
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    // Delegate to the wire-format parsing constructor.
    return new NodePublishVolumeResponse(input, extensionRegistry);
  }
};
/** Returns the shared parser for this message type. */
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeResponse> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeResponse> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Accessor contract for {@code hadoop.yarn.NodeUnpublishVolumeRequest}:
 * the request asking a CSI adaptor to unpublish (unmount) a volume
 * from a node. Both fields are declared {@code required} in the .proto.
 */
public interface NodeUnpublishVolumeRequestOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeUnpublishVolumeRequest)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
 * Returns true if the volume_id field has been set.
 * <code>required string volume_id = 1;</code>
 */
boolean hasVolumeId();
/**
 * Returns the CSI volume id as a String.
 * <code>required string volume_id = 1;</code>
 */
java.lang.String getVolumeId();
/**
 * Returns the volume id as raw UTF-8 bytes.
 * <code>required string volume_id = 1;</code>
 */
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes();
/**
 * Returns true if the target_path field has been set.
 * <code>required string target_path = 2;</code>
 */
boolean hasTargetPath();
/**
 * Returns the path the volume was previously published (mounted) to.
 * <code>required string target_path = 2;</code>
 */
java.lang.String getTargetPath();
/**
 * Returns the target path as raw UTF-8 bytes.
 * <code>required string target_path = 2;</code>
 */
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getTargetPathBytes();
}
/**
* Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeRequest}
*/
public static final class NodeUnpublishVolumeRequest extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodeUnpublishVolumeRequest)
NodeUnpublishVolumeRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeUnpublishVolumeRequest.newBuilder() to construct.
private NodeUnpublishVolumeRequest(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private NodeUnpublishVolumeRequest() {
volumeId_ = "";
targetPath_ = "";
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private NodeUnpublishVolumeRequest(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
volumeId_ = bs;
break;
}
case 18: {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
targetPath_ = bs;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.org.apache.hadoop.shaded.io.IOException e) {
throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.Builder.class);
}
private int bitField0_;
public static final int VOLUME_ID_FIELD_NUMBER = 1;
private volatile java.lang.Object volumeId_;
/**
* required string volume_id = 1;
*/
public boolean hasVolumeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string volume_id = 1;
*/
public java.lang.String getVolumeId() {
java.lang.Object ref = volumeId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
volumeId_ = s;
}
return s;
}
}
/**
* required string volume_id = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes() {
java.lang.Object ref = volumeId_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
volumeId_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int TARGET_PATH_FIELD_NUMBER = 2;
private volatile java.lang.Object targetPath_;
/**
* required string target_path = 2;
*/
public boolean hasTargetPath() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string target_path = 2;
*/
public java.lang.String getTargetPath() {
java.lang.Object ref = targetPath_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
targetPath_ = s;
}
return s;
}
}
/**
* required string target_path = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getTargetPathBytes() {
java.lang.Object ref = targetPath_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
targetPath_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasVolumeId()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasTargetPath()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.org.apache.hadoop.shaded.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, volumeId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, targetPath_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.org.apache.hadoop.shaded.com.uteStringSize(1, volumeId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.org.apache.hadoop.shaded.com.uteStringSize(2, targetPath_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest) obj;
if (hasVolumeId() != other.hasVolumeId()) return false;
if (hasVolumeId()) {
if (!getVolumeId()
.equals(other.getVolumeId())) return false;
}
if (hasTargetPath() != other.hasTargetPath()) return false;
if (hasTargetPath()) {
if (!getTargetPath()
.equals(other.getTargetPath())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasVolumeId()) {
hash = (37 * hash) + VOLUME_ID_FIELD_NUMBER;
hash = (53 * hash) + getVolumeId().hashCode();
}
if (hasTargetPath()) {
hash = (37 * hash) + TARGET_PATH_FIELD_NUMBER;
hash = (53 * hash) + getTargetPath().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseDelimitedFrom(java.org.apache.hadoop.shaded.io.InputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseDelimitedFrom(
java.org.apache.hadoop.shaded.io.InputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeRequest}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeUnpublishVolumeRequest)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequestOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
volumeId_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
targetPath_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.volumeId_ = volumeId_;
if (((from_bitField0_ & 0x00000002) != 0)) {
to_bitField0_ |= 0x00000002;
}
result.targetPath_ = targetPath_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.getDefaultInstance()) return this;
if (other.hasVolumeId()) {
bitField0_ |= 0x00000001;
volumeId_ = other.volumeId_;
onChanged();
}
if (other.hasTargetPath()) {
bitField0_ |= 0x00000002;
targetPath_ = other.targetPath_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasVolumeId()) {
return false;
}
if (!hasTargetPath()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object volumeId_ = "";
/**
* required string volume_id = 1;
*/
public boolean hasVolumeId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string volume_id = 1;
*/
public java.lang.String getVolumeId() {
java.lang.Object ref = volumeId_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
volumeId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string volume_id = 1;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getVolumeIdBytes() {
java.lang.Object ref = volumeId_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
volumeId_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string volume_id = 1;
*/
public Builder setVolumeId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
volumeId_ = value;
onChanged();
return this;
}
/**
* required string volume_id = 1;
*/
public Builder clearVolumeId() {
bitField0_ = (bitField0_ & ~0x00000001);
volumeId_ = getDefaultInstance().getVolumeId();
onChanged();
return this;
}
/**
* required string volume_id = 1;
*/
public Builder setVolumeIdBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
volumeId_ = value;
onChanged();
return this;
}
private java.lang.Object targetPath_ = "";
/**
* required string target_path = 2;
*/
public boolean hasTargetPath() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string target_path = 2;
*/
public java.lang.String getTargetPath() {
java.lang.Object ref = targetPath_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
targetPath_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string target_path = 2;
*/
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString
getTargetPathBytes() {
java.lang.Object ref = targetPath_;
if (ref instanceof String) {
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
targetPath_ = b;
return b;
} else {
return (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string target_path = 2;
*/
public Builder setTargetPath(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
targetPath_ = value;
onChanged();
return this;
}
/**
* required string target_path = 2;
*/
public Builder clearTargetPath() {
bitField0_ = (bitField0_ & ~0x00000002);
targetPath_ = getDefaultInstance().getTargetPath();
onChanged();
return this;
}
/**
* required string target_path = 2;
*/
public Builder setTargetPathBytes(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
targetPath_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeUnpublishVolumeRequest)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeUnpublishVolumeRequest)
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser() {
@java.lang.Override
public NodeUnpublishVolumeRequest parsePartialFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new NodeUnpublishVolumeRequest(input, extensionRegistry);
}
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/**
 * Accessor contract for {@code hadoop.yarn.NodeUnpublishVolumeResponse}.
 * The message is intentionally empty, so no field accessors are declared.
 */
public interface NodeUnpublishVolumeResponseOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeUnpublishVolumeResponse)
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
*
* Intentionally empty.
*
*
* Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeResponse}
*/
public static final class NodeUnpublishVolumeResponse extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.yarn.NodeUnpublishVolumeResponse)
NodeUnpublishVolumeResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeUnpublishVolumeResponse.newBuilder() to construct.
private NodeUnpublishVolumeResponse(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder> builder) {
super(builder);
}
private NodeUnpublishVolumeResponse() {
}
@java.lang.Override
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor. The message declares no fields, so every
 * non-zero tag is routed to the unknown-field set.
 */
private NodeUnpublishVolumeResponse(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.Builder unknownFields =
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    // Restored: the rendered source had java.io mangled by the shading rewrite.
    throw new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Returns the Descriptor for this empty message type, resolved from the
// outer class's static descriptor tables.
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
}
// Wires this class and its Builder into the reflective field-access table.
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.Builder.class);
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// Always initialized: the message declares no required fields.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes this (field-less) message: only unknown fields are written.
 * The {@code throws java.io.IOException} clause is restored — the rendered
 * source had it mangled by the shading rewrite, which does not compile.
 */
@java.lang.Override
public void writeTo(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  unknownFields.writeTo(output);
}
/** Computes (and memoizes) the serialized size; only unknown fields contribute. */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality: with no declared fields, two instances are equal iff
// their unknown-field sets are equal.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse)) {
return super.equals(obj);
}
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse other = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash derived from the descriptor and the unknown-field set,
// consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Byte-oriented parse entry points (ByteBuffer / ByteString / byte[]). ---
// Each overload delegates to the singleton PARSER; the extensionRegistry
// variants allow callers to resolve extensions while parsing.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(byte[] data)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
byte[] data,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// --- Stream-oriented parse entry points. ---
// NOTE: the original text referenced "java.org.apache.hadoop.shaded.io.InputStream" and
// "java.org.apache.hadoop.shaded.io.IOException" — invalid types produced by a naive
// shading/string replacement on "java.io.*"; restored to the JDK types below.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
    java.io.InputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes,
// allowing several messages to be framed on one stream.
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseDelimitedFrom(
    java.io.InputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
    org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Creates a fresh Builder seeded from the default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a Builder pre-populated with the contents of the given prototype.
public static Builder newBuilder(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoids an unnecessary mergeFrom when this is the (immutable) default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Intentionally empty.
*
*
* Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeResponse}
*/
public static final class Builder extends
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeUnpublishVolumeResponse)
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponseOrBuilder {
public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.class, org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.Builder.class);
}
// Construct using org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse getDefaultInstanceForType() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse build() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse buildPartial() {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse result = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse) {
return mergeFrom((org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse other) {
if (other == org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.org.apache.hadoop.shaded.io.IOException {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeUnpublishVolumeResponse)
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.NodeUnpublishVolumeResponse)
// Singleton default (empty) instance; shared by getDefaultInstance()/toBuilder().
private static final org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse();
}
public static org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser instance. The public field is deprecated by the generator in
// favor of parser(). NOTE: the generic type arguments below were stripped by
// HTML scraping in the original text (raw Parser/AbstractParser); restored to
// Parser<NodeUnpublishVolumeResponse> as protoc emits them.
@java.lang.Deprecated public static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeResponse>
    PARSER = new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeUnpublishVolumeResponse>() {
  @java.lang.Override
  public NodeUnpublishVolumeResponse parsePartialFrom(
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    return new NodeUnpublishVolumeResponse(input, extensionRegistry);
  }
};
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeResponse> parser() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeResponse> getParserForType() {
  return PARSER;
}
@java.lang.Override
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse getDefaultInstanceForType() {
// Instance-level accessor required by the MessageLite contract.
return DEFAULT_INSTANCE;
}
}
// --- Descriptor and reflection-table holders, one pair per message type in
// --- yarn_csi_adaptor.proto. All are assigned once in the static initializer below.
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_VolumeCapability_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable;
private static final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
private static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable;
// File-level descriptor for yarn_csi_adaptor.proto.
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized FileDescriptorProto for yarn_csi_adaptor.proto, emitted by protoc
// as an escaped string. MUST remain byte-identical to the generator's output.
java.lang.String[] descriptorData = {
"\n\026yarn_csi_adaptor.proto\022\013hadoop.yarn\032\021y" +
"arn_protos.proto\"\260\001\n!ValidateVolumeCapab" +
"ilitiesRequest\022\021\n\tvolume_id\030\001 \002(\t\022:\n\023vol" +
"ume_capabilities\030\002 \003(\0132\035.hadoop.yarn.Vol" +
"umeCapability\022<\n\021volume_attributes\030\003 \003(\013" +
"2!.hadoop.yarn.StringStringMapProto\"H\n\"V" +
"alidateVolumeCapabilitiesResponse\022\021\n\tsup" +
"ported\030\001 \002(\010\022\017\n\007message\030\002 \001(\t\"\367\002\n\020Volume" +
"Capability\022=\n\013volume_type\030\001 \002(\0162(.hadoop" +
".yarn.VolumeCapability.VolumeType\022=\n\013acc" +
"ess_mode\030\002 \002(\0162(.hadoop.yarn.VolumeCapab" +
"ility.AccessMode\022\023\n\013mount_flags\030\003 \003(\t\"(\n" +
"\nVolumeType\022\t\n\005BLOCK\020\000\022\017\n\013FILE_SYSTEM\020\001\"" +
"\245\001\n\nAccessMode\022\013\n\007UNKNOWN\020\000\022\026\n\022SINGLE_NO" +
"DE_WRITER\020\001\022\033\n\027SINGLE_NODE_READER_ONLY\020\002" +
"\022\032\n\026MULTI_NODE_READER_ONLY\020\003\022\034\n\030MULTI_NO" +
"DE_SINGLE_WRITER\020\004\022\033\n\027MULTI_NODE_MULTI_W" +
"RITER\020\005\"\026\n\024GetPluginInfoRequest\"=\n\025GetPl" +
"uginInfoResponse\022\014\n\004name\030\001 \002(\t\022\026\n\016vendor" +
"_version\030\002 \002(\t\"\326\002\n\030NodePublishVolumeRequ" +
"est\022\021\n\tvolume_id\030\001 \002(\t\022:\n\017publish_contex" +
"t\030\002 \003(\0132!.hadoop.yarn.StringStringMapPro" +
"to\022\033\n\023staging_target_path\030\003 \001(\t\022\023\n\013targe" +
"t_path\030\004 \002(\t\0228\n\021volume_capability\030\005 \002(\0132" +
"\035.hadoop.yarn.VolumeCapability\022\020\n\010readon" +
"ly\030\006 \002(\010\0222\n\007secrets\030\007 \003(\0132!.hadoop.yarn." +
"StringStringMapProto\0229\n\016volume_context\030\010" +
" \003(\0132!.hadoop.yarn.StringStringMapProto\"" +
"\033\n\031NodePublishVolumeResponse\"D\n\032NodeUnpu" +
"blishVolumeRequest\022\021\n\tvolume_id\030\001 \002(\t\022\023\n" +
"\013target_path\030\002 \002(\t\"\035\n\033NodeUnpublishVolum" +
"eResponseB3\n\034org.apache.hadoop.yarn.prot" +
"oB\020CsiAdaptorProtos\240\001\001"
};
// Assigner captures the built FileDescriptor into the static 'descriptor' field.
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
// Build the file descriptor; yarn_protos.proto is the single declared dependency.
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
}, assigner);
// Wire each message-type descriptor (by declaration order in the .proto file)
// to its reflection field-accessor table.
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor,
new java.lang.String[] { "VolumeId", "VolumeCapabilities", "VolumeAttributes", });
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor,
new java.lang.String[] { "Supported", "Message", });
internal_static_hadoop_yarn_VolumeCapability_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_VolumeCapability_descriptor,
new java.lang.String[] { "VolumeType", "AccessMode", "MountFlags", });
internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor,
new java.lang.String[] { });
internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor,
new java.lang.String[] { "Name", "VendorVersion", });
internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor,
new java.lang.String[] { "VolumeId", "PublishContext", "StagingTargetPath", "TargetPath", "VolumeCapability", "Readonly", "Secrets", "VolumeContext", });
internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor,
new java.lang.String[] { });
internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor,
new java.lang.String[] { "VolumeId", "TargetPath", });
internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable = new
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor,
new java.lang.String[] { });
// Force static initialization of the dependency's descriptors.
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
// Scrape artifact (website footer), commented out so the file stays valid Java:
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy