// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: erasurecoding.proto
package org.apache.hadoop.hdfs.protocol.proto;
public final class ErasureCodingProtos {
private ErasureCodingProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
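// Note (illustrative, not part of the protoc output): erasurecoding.proto
// declares no protobuf extensions, so the registration methods above are
// intentionally empty. Callers that build a registry anyway can still thread
// it through the generated parse methods, e.g.:
//   org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry =
//       org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite.newInstance();
//   ErasureCodingProtos.registerAllExtensions(registry);
//   SetErasureCodingPolicyRequestProto.parseFrom(bytes, registry);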
public interface SetErasureCodingPolicyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.SetErasureCodingPolicyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string src = 1;
* @return Whether the src field is set.
*/
boolean hasSrc();
/**
* required string src = 1;
* @return The src.
*/
java.lang.String getSrc();
/**
* required string src = 1;
* @return The bytes for src.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes();
/**
* optional string ecPolicyName = 2;
* @return Whether the ecPolicyName field is set.
*/
boolean hasEcPolicyName();
/**
* optional string ecPolicyName = 2;
* @return The ecPolicyName.
*/
java.lang.String getEcPolicyName();
/**
* optional string ecPolicyName = 2;
* @return The bytes for ecPolicyName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes();
}
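// Illustrative sketch (not part of the protoc output): both the immutable
// message and its Builder implement the *OrBuilder interface above, so
// read-only code can accept either without forcing an intermediate build().
private static java.lang.String describeRequest(
    SetErasureCodingPolicyRequestProtoOrBuilder req) {
  // For this optional proto2 field, hasEcPolicyName() distinguishes
  // "never set" from "set to the empty string".
  return req.getSrc() + " -> "
      + (req.hasEcPolicyName() ? req.getEcPolicyName() : "(default policy)");
}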
/**
* Protobuf type {@code hadoop.hdfs.SetErasureCodingPolicyRequestProto}
*/
public static final class SetErasureCodingPolicyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.SetErasureCodingPolicyRequestProto)
SetErasureCodingPolicyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SetErasureCodingPolicyRequestProto.newBuilder() to construct.
private SetErasureCodingPolicyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SetErasureCodingPolicyRequestProto() {
src_ = "";
ecPolicyName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SetErasureCodingPolicyRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto.Builder.class);
}
private int bitField0_;
public static final int SRC_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object src_ = "";
/**
* required string src = 1;
* @return Whether the src field is set.
*/
@java.lang.Override
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string src = 1;
* @return The src.
*/
@java.lang.Override
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
}
}
/**
* required string src = 1;
* @return The bytes for src.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int ECPOLICYNAME_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object ecPolicyName_ = "";
/**
* optional string ecPolicyName = 2;
* @return Whether the ecPolicyName field is set.
*/
@java.lang.Override
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string ecPolicyName = 2;
* @return The ecPolicyName.
*/
@java.lang.Override
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
}
}
/**
* optional string ecPolicyName = 2;
* @return The bytes for ecPolicyName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasSrc()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, src_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, ecPolicyName_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, src_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, ecPolicyName_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto) obj;
if (hasSrc() != other.hasSrc()) return false;
if (hasSrc()) {
if (!getSrc()
.equals(other.getSrc())) return false;
}
if (hasEcPolicyName() != other.hasEcPolicyName()) return false;
if (hasEcPolicyName()) {
if (!getEcPolicyName()
.equals(other.getEcPolicyName())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSrc()) {
hash = (37 * hash) + SRC_FIELD_NUMBER;
hash = (53 * hash) + getSrc().hashCode();
}
if (hasEcPolicyName()) {
hash = (37 * hash) + ECPOLICYNAME_FIELD_NUMBER;
hash = (53 * hash) + getEcPolicyName().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.SetErasureCodingPolicyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.SetErasureCodingPolicyRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
src_ = "";
ecPolicyName_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.src_ = src_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.ecPolicyName_ = ecPolicyName_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto.getDefaultInstance()) return this;
if (other.hasSrc()) {
src_ = other.src_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasEcPolicyName()) {
ecPolicyName_ = other.ecPolicyName_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasSrc()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
src_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
ecPolicyName_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
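// Wire-format note (illustrative): each tag above is
// (field_number << 3) | wire_type. Both fields are length-delimited
// strings (wire type 2), so src (field 1) arrives as tag 10 and
// ecPolicyName (field 2) as tag 18; a tag of 0 signals end of input.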
private int bitField0_;
private java.lang.Object src_ = "";
/**
* required string src = 1;
* @return Whether the src field is set.
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string src = 1;
* @return The src.
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string src = 1;
* @return The bytes for src.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string src = 1;
* @param value The src to set.
* @return This builder for chaining.
*/
public Builder setSrc(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
src_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string src = 1;
* @return This builder for chaining.
*/
public Builder clearSrc() {
src_ = getDefaultInstance().getSrc();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string src = 1;
* @param value The bytes for src to set.
* @return This builder for chaining.
*/
public Builder setSrcBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
src_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object ecPolicyName_ = "";
/**
* optional string ecPolicyName = 2;
* @return Whether the ecPolicyName field is set.
*/
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* optional string ecPolicyName = 2;
* @return The ecPolicyName.
*/
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* optional string ecPolicyName = 2;
* @return The bytes for ecPolicyName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* optional string ecPolicyName = 2;
* @param value The ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* optional string ecPolicyName = 2;
* @return This builder for chaining.
*/
public Builder clearEcPolicyName() {
ecPolicyName_ = getDefaultInstance().getEcPolicyName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* optional string ecPolicyName = 2;
* @param value The bytes for ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.SetErasureCodingPolicyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.SetErasureCodingPolicyRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SetErasureCodingPolicyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SetErasureCodingPolicyRequestProto>() {
@java.lang.Override
public SetErasureCodingPolicyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SetErasureCodingPolicyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SetErasureCodingPolicyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
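// Usage sketch (illustrative, not part of the protoc output): builds a
// request, serializes it, and parses it back. The path and policy name
// below are example values only.
private static SetErasureCodingPolicyRequestProto exampleRoundTrip()
    throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
  SetErasureCodingPolicyRequestProto request =
      SetErasureCodingPolicyRequestProto.newBuilder()
          .setSrc("/warehouse/data")       // required: build() throws if unset
          .setEcPolicyName("RS-6-3-1024k") // optional: may be omitted
          .build();
  byte[] wire = request.toByteArray();
  return SetErasureCodingPolicyRequestProto.parseFrom(wire);
}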
public interface SetErasureCodingPolicyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.SetErasureCodingPolicyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.hdfs.SetErasureCodingPolicyResponseProto}
*/
public static final class SetErasureCodingPolicyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.SetErasureCodingPolicyResponseProto)
SetErasureCodingPolicyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use SetErasureCodingPolicyResponseProto.newBuilder() to construct.
private SetErasureCodingPolicyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SetErasureCodingPolicyResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SetErasureCodingPolicyResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.SetErasureCodingPolicyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.SetErasureCodingPolicyResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.SetErasureCodingPolicyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.SetErasureCodingPolicyResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SetErasureCodingPolicyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SetErasureCodingPolicyResponseProto>() {
@java.lang.Override
public SetErasureCodingPolicyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<SetErasureCodingPolicyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<SetErasureCodingPolicyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
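// Usage sketch (illustrative, not part of the protoc output): the response
// message declares no fields, so its default instance serializes to zero
// bytes; it exists only to acknowledge the setErasureCodingPolicy RPC.
private static byte[] exampleEmptyAck() {
  return SetErasureCodingPolicyResponseProto.getDefaultInstance().toByteArray();
}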
public interface GetErasureCodingPoliciesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetErasureCodingPoliciesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* <pre>
* void request
* </pre>
*
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPoliciesRequestProto}
*/
public static final class GetErasureCodingPoliciesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetErasureCodingPoliciesRequestProto)
GetErasureCodingPoliciesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetErasureCodingPoliciesRequestProto.newBuilder() to construct.
private GetErasureCodingPoliciesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetErasureCodingPoliciesRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetErasureCodingPoliciesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* void request
* </pre>
*
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPoliciesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetErasureCodingPoliciesRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetErasureCodingPoliciesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetErasureCodingPoliciesRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPoliciesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetErasureCodingPoliciesRequestProto>() {
@java.lang.Override
public GetErasureCodingPoliciesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPoliciesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPoliciesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
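/*
 * Illustrative round trip for the empty request message above; a minimal
 * sketch, not part of the generated file, using only members defined in
 * this class:
 *
 *   GetErasureCodingPoliciesRequestProto req =
 *       GetErasureCodingPoliciesRequestProto.newBuilder().build();
 *   byte[] wire = req.toByteArray();   // a fresh empty message serializes to zero bytes
 *   GetErasureCodingPoliciesRequestProto parsed =
 *       GetErasureCodingPoliciesRequestProto.parseFrom(wire);
 */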
public interface GetErasureCodingPoliciesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetErasureCodingPoliciesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto>
getEcPoliciesList();
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicies(int index);
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
int getEcPoliciesCount();
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesOrBuilderList();
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPoliciesOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPoliciesResponseProto}
*/
public static final class GetErasureCodingPoliciesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetErasureCodingPoliciesResponseProto)
GetErasureCodingPoliciesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetErasureCodingPoliciesResponseProto.newBuilder() to construct.
private GetErasureCodingPoliciesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetErasureCodingPoliciesResponseProto() {
ecPolicies_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetErasureCodingPoliciesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto.Builder.class);
}
public static final int ECPOLICIES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> ecPolicies_;
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> getEcPoliciesList() {
return ecPolicies_;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesOrBuilderList() {
return ecPolicies_;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public int getEcPoliciesCount() {
return ecPolicies_.size();
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicies(int index) {
return ecPolicies_.get(index);
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPoliciesOrBuilder(
int index) {
return ecPolicies_.get(index);
}
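// Tri-state cache for isInitialized() below: -1 = not yet computed,
// 0 = known uninitialized, 1 = known initialized.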
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getEcPoliciesCount(); i++) {
if (!getEcPolicies(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < ecPolicies_.size(); i++) {
output.writeMessage(1, ecPolicies_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < ecPolicies_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, ecPolicies_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto) obj;
if (!getEcPoliciesList()
.equals(other.getEcPoliciesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEcPoliciesCount() > 0) {
hash = (37 * hash) + ECPOLICIES_FIELD_NUMBER;
hash = (53 * hash) + getEcPoliciesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPoliciesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetErasureCodingPoliciesResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (ecPoliciesBuilder_ == null) {
ecPolicies_ = java.util.Collections.emptyList();
} else {
ecPolicies_ = null;
ecPoliciesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto result) {
if (ecPoliciesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
ecPolicies_ = java.util.Collections.unmodifiableList(ecPolicies_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.ecPolicies_ = ecPolicies_;
} else {
result.ecPolicies_ = ecPoliciesBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto result) {
int from_bitField0_ = bitField0_;
}
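// buildPartial0 is effectively a no-op for this message: there are no
// singular fields guarded by bitField0_, only the repeated field copied in
// buildPartialRepeatedFields above.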
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto.getDefaultInstance()) return this;
if (ecPoliciesBuilder_ == null) {
if (!other.ecPolicies_.isEmpty()) {
if (ecPolicies_.isEmpty()) {
ecPolicies_ = other.ecPolicies_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEcPoliciesIsMutable();
ecPolicies_.addAll(other.ecPolicies_);
}
onChanged();
}
} else {
if (!other.ecPolicies_.isEmpty()) {
if (ecPoliciesBuilder_.isEmpty()) {
ecPoliciesBuilder_.dispose();
ecPoliciesBuilder_ = null;
ecPolicies_ = other.ecPolicies_;
bitField0_ = (bitField0_ & ~0x00000001);
ecPoliciesBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getEcPoliciesFieldBuilder() : null;
} else {
ecPoliciesBuilder_.addAllMessages(other.ecPolicies_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
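/*
 * Merge semantics note: protobuf merges concatenate repeated fields, so the
 * method above appends other's policies to this builder's list. A minimal
 * sketch (responseA and responseB are hypothetical messages of this type):
 *
 *   GetErasureCodingPoliciesResponseProto merged =
 *       GetErasureCodingPoliciesResponseProto.newBuilder()
 *           .mergeFrom(responseA)
 *           .mergeFrom(responseB)
 *           .build();
 *   // merged.getEcPoliciesCount() == responseA.getEcPoliciesCount()
 *   //                              + responseB.getEcPoliciesCount()
 */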
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getEcPoliciesCount(); i++) {
if (!getEcPolicies(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto m =
input.readMessage(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.PARSER,
extensionRegistry);
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.add(m);
} else {
ecPoliciesBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
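/*
 * The literal 10 in case 10 above is the wire tag for this message's only
 * field: tag = (field_number << 3) | wire_type = (1 << 3) | 2 = 10, where
 * wire type 2 means length-delimited (an embedded message). A tag of 0 never
 * occurs in valid input, so case 0 marks end of stream.
 */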
private int bitField0_;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> ecPolicies_ =
java.util.Collections.emptyList();
private void ensureEcPoliciesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
ecPolicies_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto>(ecPolicies_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder> ecPoliciesBuilder_;
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> getEcPoliciesList() {
if (ecPoliciesBuilder_ == null) {
return java.util.Collections.unmodifiableList(ecPolicies_);
} else {
return ecPoliciesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public int getEcPoliciesCount() {
if (ecPoliciesBuilder_ == null) {
return ecPolicies_.size();
} else {
return ecPoliciesBuilder_.getCount();
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicies(int index) {
if (ecPoliciesBuilder_ == null) {
return ecPolicies_.get(index);
} else {
return ecPoliciesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder setEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPoliciesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEcPoliciesIsMutable();
ecPolicies_.set(index, value);
onChanged();
} else {
ecPoliciesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder setEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.set(index, builderForValue.build());
onChanged();
} else {
ecPoliciesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPoliciesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEcPoliciesIsMutable();
ecPolicies_.add(value);
onChanged();
} else {
ecPoliciesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPoliciesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEcPoliciesIsMutable();
ecPolicies_.add(index, value);
onChanged();
} else {
ecPoliciesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.add(builderForValue.build());
onChanged();
} else {
ecPoliciesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.add(index, builderForValue.build());
onChanged();
} else {
ecPoliciesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addAllEcPolicies(
java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> values) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, ecPolicies_);
onChanged();
} else {
ecPoliciesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder clearEcPolicies() {
if (ecPoliciesBuilder_ == null) {
ecPolicies_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
ecPoliciesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder removeEcPolicies(int index) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.remove(index);
onChanged();
} else {
ecPoliciesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder getEcPoliciesBuilder(
int index) {
return getEcPoliciesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPoliciesOrBuilder(
int index) {
if (ecPoliciesBuilder_ == null) {
return ecPolicies_.get(index); } else {
return ecPoliciesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesOrBuilderList() {
if (ecPoliciesBuilder_ != null) {
return ecPoliciesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(ecPolicies_);
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder addEcPoliciesBuilder() {
return getEcPoliciesFieldBuilder().addBuilder(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder addEcPoliciesBuilder(
int index) {
return getEcPoliciesFieldBuilder().addBuilder(
index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder>
getEcPoliciesBuilderList() {
return getEcPoliciesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesFieldBuilder() {
if (ecPoliciesBuilder_ == null) {
ecPoliciesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>(
ecPolicies_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
ecPolicies_ = null;
}
return ecPoliciesBuilder_;
}
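/*
 * Design note: the builder holds either the plain list (ecPolicies_) or the
 * RepeatedFieldBuilderV3 above, never both at once. The field builder is
 * created lazily on first builder-view access (e.g. getEcPoliciesBuilder),
 * takes ownership of the current list, and ecPolicies_ is nulled out; every
 * accessor therefore branches on whether ecPoliciesBuilder_ is null.
 */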
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetErasureCodingPoliciesResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetErasureCodingPoliciesResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPoliciesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetErasureCodingPoliciesResponseProto>() {
@java.lang.Override
public GetErasureCodingPoliciesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPoliciesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPoliciesResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
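/*
 * Illustrative construction of the response message above; a minimal sketch,
 * not part of the generated file. somePolicy is a hypothetical
 * HdfsProtos.ErasureCodingPolicyProto obtained elsewhere:
 *
 *   GetErasureCodingPoliciesResponseProto resp =
 *       GetErasureCodingPoliciesResponseProto.newBuilder()
 *           .addEcPolicies(somePolicy)
 *           .build();   // build() throws if any policy is uninitialized
 *   for (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto p
 *       : resp.getEcPoliciesList()) {
 *     // inspect p
 *   }
 */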
public interface GetErasureCodingCodecsRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetErasureCodingCodecsRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* <pre>
* void request
* </pre>
*
* Protobuf type {@code hadoop.hdfs.GetErasureCodingCodecsRequestProto}
*/
public static final class GetErasureCodingCodecsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetErasureCodingCodecsRequestProto)
GetErasureCodingCodecsRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetErasureCodingCodecsRequestProto.newBuilder() to construct.
private GetErasureCodingCodecsRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetErasureCodingCodecsRequestProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetErasureCodingCodecsRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* void request
* </pre>
*
* Protobuf type {@code hadoop.hdfs.GetErasureCodingCodecsRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetErasureCodingCodecsRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetErasureCodingCodecsRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetErasureCodingCodecsRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingCodecsRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetErasureCodingCodecsRequestProto>() {
@java.lang.Override
public GetErasureCodingCodecsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingCodecsRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingCodecsRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface GetErasureCodingCodecsResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetErasureCodingCodecsResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
java.util.List<org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto>
getCodecList();
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto getCodec(int index);
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
int getCodecCount();
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder>
getCodecOrBuilderList();
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder getCodecOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingCodecsResponseProto}
*/
public static final class GetErasureCodingCodecsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetErasureCodingCodecsResponseProto)
GetErasureCodingCodecsResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetErasureCodingCodecsResponseProto.newBuilder() to construct.
private GetErasureCodingCodecsResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetErasureCodingCodecsResponseProto() {
codec_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetErasureCodingCodecsResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto.Builder.class);
}
public static final int CODEC_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto> codec_;
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto> getCodecList() {
return codec_;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder>
getCodecOrBuilderList() {
return codec_;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
@java.lang.Override
public int getCodecCount() {
return codec_.size();
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto getCodec(int index) {
return codec_.get(index);
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder getCodecOrBuilder(
int index) {
return codec_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getCodecCount(); i++) {
if (!getCodec(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < codec_.size(); i++) {
output.writeMessage(1, codec_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < codec_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, codec_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto) obj;
if (!getCodecList()
.equals(other.getCodecList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getCodecCount() > 0) {
hash = (37 * hash) + CODEC_FIELD_NUMBER;
hash = (53 * hash) + getCodecList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingCodecsResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetErasureCodingCodecsResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (codecBuilder_ == null) {
codec_ = java.util.Collections.emptyList();
} else {
codec_ = null;
codecBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto result) {
if (codecBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
codec_ = java.util.Collections.unmodifiableList(codec_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.codec_ = codec_;
} else {
result.codec_ = codecBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto.getDefaultInstance()) return this;
if (codecBuilder_ == null) {
if (!other.codec_.isEmpty()) {
if (codec_.isEmpty()) {
codec_ = other.codec_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureCodecIsMutable();
codec_.addAll(other.codec_);
}
onChanged();
}
} else {
if (!other.codec_.isEmpty()) {
if (codecBuilder_.isEmpty()) {
codecBuilder_.dispose();
codecBuilder_ = null;
codec_ = other.codec_;
bitField0_ = (bitField0_ & ~0x00000001);
codecBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getCodecFieldBuilder() : null;
} else {
codecBuilder_.addAllMessages(other.codec_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getCodecCount(); i++) {
if (!getCodec(i).isInitialized()) {
return false;
}
}
return true;
}
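// Editor's note: the loop above validates each repeated entry individually;
// protoc emits this per-element check only when the element type can itself
// be uninitialized, i.e. CodecProto presumably declares required fields
// (transitively) in erasurecoding.proto.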
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto m =
input.readMessage(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.PARSER,
extensionRegistry);
if (codecBuilder_ == null) {
ensureCodecIsMutable();
codec_.add(m);
} else {
codecBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
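// Editor's note: the parse loop above dispatches on the raw wire-format tag,
// which packs (field_number << 3) | wire_type. The repeated `codec` field is
// field 1 with wire type 2 (length-delimited), so its tag is
// (1 << 3) | 2 == 10, matching `case 10`. A tag of 0 never appears in valid
// encoded data, so it (or an unmatched end-group tag) terminates the loop.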
private int bitField0_;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto> codec_ =
java.util.Collections.emptyList();
private void ensureCodecIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
codec_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto>(codec_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder> codecBuilder_;
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto> getCodecList() {
if (codecBuilder_ == null) {
return java.util.Collections.unmodifiableList(codec_);
} else {
return codecBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public int getCodecCount() {
if (codecBuilder_ == null) {
return codec_.size();
} else {
return codecBuilder_.getCount();
}
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto getCodec(int index) {
if (codecBuilder_ == null) {
return codec_.get(index);
} else {
return codecBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder setCodec(
int index, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto value) {
if (codecBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCodecIsMutable();
codec_.set(index, value);
onChanged();
} else {
codecBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder setCodec(
int index, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder builderForValue) {
if (codecBuilder_ == null) {
ensureCodecIsMutable();
codec_.set(index, builderForValue.build());
onChanged();
} else {
codecBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder addCodec(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto value) {
if (codecBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCodecIsMutable();
codec_.add(value);
onChanged();
} else {
codecBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder addCodec(
int index, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto value) {
if (codecBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCodecIsMutable();
codec_.add(index, value);
onChanged();
} else {
codecBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder addCodec(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder builderForValue) {
if (codecBuilder_ == null) {
ensureCodecIsMutable();
codec_.add(builderForValue.build());
onChanged();
} else {
codecBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder addCodec(
int index, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder builderForValue) {
if (codecBuilder_ == null) {
ensureCodecIsMutable();
codec_.add(index, builderForValue.build());
onChanged();
} else {
codecBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder addAllCodec(
java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto> values) {
if (codecBuilder_ == null) {
ensureCodecIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, codec_);
onChanged();
} else {
codecBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder clearCodec() {
if (codecBuilder_ == null) {
codec_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
codecBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public Builder removeCodec(int index) {
if (codecBuilder_ == null) {
ensureCodecIsMutable();
codec_.remove(index);
onChanged();
} else {
codecBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder getCodecBuilder(
int index) {
return getCodecFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder getCodecOrBuilder(
int index) {
if (codecBuilder_ == null) {
return codec_.get(index); } else {
return codecBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder>
getCodecOrBuilderList() {
if (codecBuilder_ != null) {
return codecBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(codec_);
}
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder addCodecBuilder() {
return getCodecFieldBuilder().addBuilder(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder addCodecBuilder(
int index) {
return getCodecFieldBuilder().addBuilder(
index, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.CodecProto codec = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder>
getCodecBuilderList() {
return getCodecFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder>
getCodecFieldBuilder() {
if (codecBuilder_ == null) {
codecBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder>(
codec_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
codec_ = null;
}
return codecBuilder_;
}
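// Editor's note: getCodecFieldBuilder() is the lazy hand-off point between
// the two representations of the repeated field. Until a builder-view
// accessor (getCodecBuilder, addCodecBuilder, getCodecBuilderList) is first
// called, entries live in the plain `codec_` list; the first call wraps that
// list in a RepeatedFieldBuilderV3 and nulls `codec_`, so there is always a
// single source of truth. This is why every codec accessor above branches on
// `codecBuilder_ == null`.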
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetErasureCodingCodecsResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetErasureCodingCodecsResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingCodecsResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetErasureCodingCodecsResponseProto>() {
@java.lang.Override
public GetErasureCodingCodecsResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingCodecsResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingCodecsResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
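// Editor's note: a minimal construction sketch for the message above
// (illustrative, not generated code). CodecProto's accessors are not shown in
// this excerpt; the setCodec/setCoders calls assume the `codec`/`coders`
// string fields declared for it in erasurecoding.proto:
//
//   GetErasureCodingCodecsResponseProto resp =
//       GetErasureCodingCodecsResponseProto.newBuilder()
//           .addCodec(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//               .CodecProto.newBuilder()
//               .setCodec("rs")
//               .setCoders("rs_native,rs_java"))
//           .build();  // throws if any codec entry misses a required field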
public interface GetErasureCodingPolicyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetErasureCodingPolicyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* path to get the policy info
*
* required string src = 1;
* @return Whether the src field is set.
*/
boolean hasSrc();
/**
* path to get the policy info
*
* required string src = 1;
* @return The src.
*/
java.lang.String getSrc();
/**
* path to get the policy info
*
* required string src = 1;
* @return The bytes for src.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes();
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPolicyRequestProto}
*/
public static final class GetErasureCodingPolicyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetErasureCodingPolicyRequestProto)
GetErasureCodingPolicyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetErasureCodingPolicyRequestProto.newBuilder() to construct.
private GetErasureCodingPolicyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetErasureCodingPolicyRequestProto() {
src_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetErasureCodingPolicyRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto.Builder.class);
}
private int bitField0_;
public static final int SRC_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object src_ = "";
/**
* path to get the policy info
*
* required string src = 1;
* @return Whether the src field is set.
*/
@java.lang.Override
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* path to get the policy info
*
* required string src = 1;
* @return The src.
*/
@java.lang.Override
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
}
}
/**
* path to get the policy info
*
* required string src = 1;
* @return The bytes for src.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
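// Editor's note: src_ holds either a String or a ByteString and is memoized
// in both directions. getSrc() decodes a ByteString and caches the String
// only when the bytes are valid UTF-8; getSrcBytes() encodes a String and
// always caches the resulting ByteString. Repeated reads on either the RPC
// (bytes) or application (String) side therefore pay the conversion once.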
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasSrc()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, src_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, src_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto) obj;
if (hasSrc() != other.hasSrc()) return false;
if (hasSrc()) {
if (!getSrc()
.equals(other.getSrc())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSrc()) {
hash = (37 * hash) + SRC_FIELD_NUMBER;
hash = (53 * hash) + getSrc().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPolicyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetErasureCodingPolicyRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
src_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.src_ = src_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto.getDefaultInstance()) return this;
if (other.hasSrc()) {
src_ = other.src_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasSrc()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
src_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object src_ = "";
/**
* path to get the policy info
*
* required string src = 1;
* @return Whether the src field is set.
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* path to get the policy info
*
* required string src = 1;
* @return The src.
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* path to get the policy info
*
* required string src = 1;
* @return The bytes for src.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* path to get the policy info
*
* required string src = 1;
* @param value The src to set.
* @return This builder for chaining.
*/
public Builder setSrc(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
src_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* path to get the policy info
*
* required string src = 1;
* @return This builder for chaining.
*/
public Builder clearSrc() {
src_ = getDefaultInstance().getSrc();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* path to get the policy info
*
* required string src = 1;
* @param value The bytes for src to set.
* @return This builder for chaining.
*/
public Builder setSrcBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
src_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetErasureCodingPolicyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetErasureCodingPolicyRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPolicyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetErasureCodingPolicyRequestProto>() {
@java.lang.Override
public GetErasureCodingPolicyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPolicyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPolicyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
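// Editor's note: a minimal round-trip sketch for the request message above
// (illustrative, not generated code; "/ecdir" is just an example path):
//
//   GetErasureCodingPolicyRequestProto req =
//       GetErasureCodingPolicyRequestProto.newBuilder()
//           .setSrc("/ecdir")  // required; build() throws if unset
//           .build();
//   byte[] wire = req.toByteArray();
//   GetErasureCodingPolicyRequestProto back =
//       GetErasureCodingPolicyRequestProto.parseFrom(wire);
//   assert back.hasSrc() && back.getSrc().equals("/ecdir");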
public interface GetErasureCodingPolicyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetErasureCodingPolicyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
* @return Whether the ecPolicy field is set.
*/
boolean hasEcPolicy();
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
* @return The ecPolicy.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicy();
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPolicyOrBuilder();
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPolicyResponseProto}
*/
public static final class GetErasureCodingPolicyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetErasureCodingPolicyResponseProto)
GetErasureCodingPolicyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetErasureCodingPolicyResponseProto.newBuilder() to construct.
private GetErasureCodingPolicyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetErasureCodingPolicyResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetErasureCodingPolicyResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto.Builder.class);
}
private int bitField0_;
public static final int ECPOLICY_FIELD_NUMBER = 1;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto ecPolicy_;
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
* @return Whether the ecPolicy field is set.
*/
@java.lang.Override
public boolean hasEcPolicy() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
* @return The ecPolicy.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicy() {
return ecPolicy_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPolicyOrBuilder() {
return ecPolicy_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (hasEcPolicy()) {
if (!getEcPolicy().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getEcPolicy());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getEcPolicy());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto) obj;
if (hasEcPolicy() != other.hasEcPolicy()) return false;
if (hasEcPolicy()) {
if (!getEcPolicy()
.equals(other.getEcPolicy())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasEcPolicy()) {
hash = (37 * hash) + ECPOLICY_FIELD_NUMBER;
hash = (53 * hash) + getEcPolicy().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetErasureCodingPolicyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetErasureCodingPolicyResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getEcPolicyFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
ecPolicy_ = null;
if (ecPolicyBuilder_ != null) {
ecPolicyBuilder_.dispose();
ecPolicyBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.ecPolicy_ = ecPolicyBuilder_ == null
? ecPolicy_
: ecPolicyBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto.getDefaultInstance()) return this;
if (other.hasEcPolicy()) {
mergeEcPolicy(other.getEcPolicy());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (hasEcPolicy()) {
if (!getEcPolicy().isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getEcPolicyFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto ecPolicy_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder> ecPolicyBuilder_;
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
* @return Whether the ecPolicy field is set.
*/
public boolean hasEcPolicy() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
* @return The ecPolicy.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicy() {
if (ecPolicyBuilder_ == null) {
return ecPolicy_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
} else {
return ecPolicyBuilder_.getMessage();
}
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
public Builder setEcPolicy(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPolicyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ecPolicy_ = value;
} else {
ecPolicyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
public Builder setEcPolicy(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPolicyBuilder_ == null) {
ecPolicy_ = builderForValue.build();
} else {
ecPolicyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
public Builder mergeEcPolicy(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPolicyBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
ecPolicy_ != null &&
ecPolicy_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance()) {
getEcPolicyBuilder().mergeFrom(value);
} else {
ecPolicy_ = value;
}
} else {
ecPolicyBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
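// Editor's note: mergeEcPolicy implements standard protobuf merge semantics
// for a singular message field: when ecPolicy is already set (and not the
// default instance), the incoming value is merged into it field by field;
// otherwise the incoming message simply replaces the current value.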
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
public Builder clearEcPolicy() {
bitField0_ = (bitField0_ & ~0x00000001);
ecPolicy_ = null;
if (ecPolicyBuilder_ != null) {
ecPolicyBuilder_.dispose();
ecPolicyBuilder_ = null;
}
onChanged();
return this;
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder getEcPolicyBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getEcPolicyFieldBuilder().getBuilder();
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPolicyOrBuilder() {
if (ecPolicyBuilder_ != null) {
return ecPolicyBuilder_.getMessageOrBuilder();
} else {
return ecPolicy_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
}
}
/**
* optional .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPolicyFieldBuilder() {
if (ecPolicyBuilder_ == null) {
ecPolicyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>(
getEcPolicy(),
getParentForChildren(),
isClean());
ecPolicy_ = null;
}
return ecPolicyBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetErasureCodingPolicyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetErasureCodingPolicyResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPolicyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetErasureCodingPolicyResponseProto>() {
@java.lang.Override
public GetErasureCodingPolicyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPolicyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetErasureCodingPolicyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
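// Illustrative sketch (not part of the generated file): reading the policy
// out of a parsed GetErasureCodingPolicyResponseProto. parseFrom(byte[]) and
// the hasEcPolicy()/getEcPolicy() pair are the accessors protoc generates
// for an optional message field; `bytes` is a hypothetical serialized blob.
//
//   GetErasureCodingPolicyResponseProto resp =
//       GetErasureCodingPolicyResponseProto.parseFrom(bytes);
//   if (resp.hasEcPolicy()) {
//     org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto policy =
//         resp.getEcPolicy();
//   }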
public interface AddErasureCodingPoliciesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.AddErasureCodingPoliciesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto>
getEcPoliciesList();
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicies(int index);
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
int getEcPoliciesCount();
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesOrBuilderList();
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPoliciesOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.hdfs.AddErasureCodingPoliciesRequestProto}
*/
public static final class AddErasureCodingPoliciesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.AddErasureCodingPoliciesRequestProto)
AddErasureCodingPoliciesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddErasureCodingPoliciesRequestProto.newBuilder() to construct.
private AddErasureCodingPoliciesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddErasureCodingPoliciesRequestProto() {
ecPolicies_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddErasureCodingPoliciesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto.Builder.class);
}
public static final int ECPOLICIES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> ecPolicies_;
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> getEcPoliciesList() {
return ecPolicies_;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesOrBuilderList() {
return ecPolicies_;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public int getEcPoliciesCount() {
return ecPolicies_.size();
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicies(int index) {
return ecPolicies_.get(index);
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPoliciesOrBuilder(
int index) {
return ecPolicies_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getEcPoliciesCount(); i++) {
if (!getEcPolicies(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < ecPolicies_.size(); i++) {
output.writeMessage(1, ecPolicies_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < ecPolicies_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, ecPolicies_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto) obj;
if (!getEcPoliciesList()
.equals(other.getEcPoliciesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEcPoliciesCount() > 0) {
hash = (37 * hash) + ECPOLICIES_FIELD_NUMBER;
hash = (53 * hash) + getEcPoliciesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.AddErasureCodingPoliciesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.AddErasureCodingPoliciesRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (ecPoliciesBuilder_ == null) {
ecPolicies_ = java.util.Collections.emptyList();
} else {
ecPolicies_ = null;
ecPoliciesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto result) {
if (ecPoliciesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
ecPolicies_ = java.util.Collections.unmodifiableList(ecPolicies_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.ecPolicies_ = ecPolicies_;
} else {
result.ecPolicies_ = ecPoliciesBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto.getDefaultInstance()) return this;
if (ecPoliciesBuilder_ == null) {
if (!other.ecPolicies_.isEmpty()) {
if (ecPolicies_.isEmpty()) {
ecPolicies_ = other.ecPolicies_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEcPoliciesIsMutable();
ecPolicies_.addAll(other.ecPolicies_);
}
onChanged();
}
} else {
if (!other.ecPolicies_.isEmpty()) {
if (ecPoliciesBuilder_.isEmpty()) {
ecPoliciesBuilder_.dispose();
ecPoliciesBuilder_ = null;
ecPolicies_ = other.ecPolicies_;
bitField0_ = (bitField0_ & ~0x00000001);
ecPoliciesBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getEcPoliciesFieldBuilder() : null;
} else {
ecPoliciesBuilder_.addAllMessages(other.ecPolicies_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getEcPoliciesCount(); i++) {
if (!getEcPolicies(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto m =
input.readMessage(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.PARSER,
extensionRegistry);
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.add(m);
} else {
ecPoliciesBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> ecPolicies_ =
java.util.Collections.emptyList();
private void ensureEcPoliciesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
ecPolicies_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto>(ecPolicies_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder> ecPoliciesBuilder_;
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> getEcPoliciesList() {
if (ecPoliciesBuilder_ == null) {
return java.util.Collections.unmodifiableList(ecPolicies_);
} else {
return ecPoliciesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public int getEcPoliciesCount() {
if (ecPoliciesBuilder_ == null) {
return ecPolicies_.size();
} else {
return ecPoliciesBuilder_.getCount();
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicies(int index) {
if (ecPoliciesBuilder_ == null) {
return ecPolicies_.get(index);
} else {
return ecPoliciesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder setEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPoliciesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEcPoliciesIsMutable();
ecPolicies_.set(index, value);
onChanged();
} else {
ecPoliciesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder setEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.set(index, builderForValue.build());
onChanged();
} else {
ecPoliciesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPoliciesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEcPoliciesIsMutable();
ecPolicies_.add(value);
onChanged();
} else {
ecPoliciesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPoliciesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEcPoliciesIsMutable();
ecPolicies_.add(index, value);
onChanged();
} else {
ecPoliciesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.add(builderForValue.build());
onChanged();
} else {
ecPoliciesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addEcPolicies(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.add(index, builderForValue.build());
onChanged();
} else {
ecPoliciesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder addAllEcPolicies(
java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto> values) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, ecPolicies_);
onChanged();
} else {
ecPoliciesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder clearEcPolicies() {
if (ecPoliciesBuilder_ == null) {
ecPolicies_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
ecPoliciesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public Builder removeEcPolicies(int index) {
if (ecPoliciesBuilder_ == null) {
ensureEcPoliciesIsMutable();
ecPolicies_.remove(index);
onChanged();
} else {
ecPoliciesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder getEcPoliciesBuilder(
int index) {
return getEcPoliciesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPoliciesOrBuilder(
int index) {
if (ecPoliciesBuilder_ == null) {
return ecPolicies_.get(index); } else {
return ecPoliciesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesOrBuilderList() {
if (ecPoliciesBuilder_ != null) {
return ecPoliciesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(ecPolicies_);
}
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder addEcPoliciesBuilder() {
return getEcPoliciesFieldBuilder().addBuilder(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder addEcPoliciesBuilder(
int index) {
return getEcPoliciesFieldBuilder().addBuilder(
index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.ErasureCodingPolicyProto ecPolicies = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder>
getEcPoliciesBuilderList() {
return getEcPoliciesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPoliciesFieldBuilder() {
if (ecPoliciesBuilder_ == null) {
ecPoliciesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>(
ecPolicies_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
ecPolicies_ = null;
}
return ecPoliciesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.AddErasureCodingPoliciesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.AddErasureCodingPoliciesRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddErasureCodingPoliciesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddErasureCodingPoliciesRequestProto>() {
@java.lang.Override
public AddErasureCodingPoliciesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddErasureCodingPoliciesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddErasureCodingPoliciesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
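// Illustrative sketch (not part of the generated file): assembling a request
// from an existing list of policies and round-tripping it through bytes.
// addAllEcPolicies(...) and parseFrom(byte[]) are defined above;
// toByteArray() comes from the protobuf message base class; `policies` is a
// hypothetical List<HdfsProtos.ErasureCodingPolicyProto>.
//
//   AddErasureCodingPoliciesRequestProto request =
//       AddErasureCodingPoliciesRequestProto.newBuilder()
//           .addAllEcPolicies(policies)
//           .build();
//   byte[] wire = request.toByteArray();
//   AddErasureCodingPoliciesRequestProto parsed =
//       AddErasureCodingPoliciesRequestProto.parseFrom(wire);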
public interface AddErasureCodingPoliciesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.AddErasureCodingPoliciesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto>
getResponsesList();
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto getResponses(int index);
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
int getResponsesCount();
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder>
getResponsesOrBuilderList();
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder getResponsesOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.hdfs.AddErasureCodingPoliciesResponseProto}
*/
public static final class AddErasureCodingPoliciesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.AddErasureCodingPoliciesResponseProto)
AddErasureCodingPoliciesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AddErasureCodingPoliciesResponseProto.newBuilder() to construct.
private AddErasureCodingPoliciesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AddErasureCodingPoliciesResponseProto() {
responses_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AddErasureCodingPoliciesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto.Builder.class);
}
public static final int RESPONSES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto> responses_;
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
@java.lang.Override
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto> getResponsesList() {
return responses_;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
@java.lang.Override
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder>
getResponsesOrBuilderList() {
return responses_;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
@java.lang.Override
public int getResponsesCount() {
return responses_.size();
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto getResponses(int index) {
return responses_.get(index);
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder getResponsesOrBuilder(
int index) {
return responses_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
for (int i = 0; i < getResponsesCount(); i++) {
if (!getResponses(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < responses_.size(); i++) {
output.writeMessage(1, responses_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < responses_.size(); i++) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, responses_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto) obj;
if (!getResponsesList()
.equals(other.getResponsesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getResponsesCount() > 0) {
hash = (37 * hash) + RESPONSES_FIELD_NUMBER;
hash = (53 * hash) + getResponsesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.AddErasureCodingPoliciesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.AddErasureCodingPoliciesResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (responsesBuilder_ == null) {
responses_ = java.util.Collections.emptyList();
} else {
responses_ = null;
responsesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto result) {
if (responsesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
responses_ = java.util.Collections.unmodifiableList(responses_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.responses_ = responses_;
} else {
result.responses_ = responsesBuilder_.build();
}
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto.getDefaultInstance()) return this;
if (responsesBuilder_ == null) {
if (!other.responses_.isEmpty()) {
if (responses_.isEmpty()) {
responses_ = other.responses_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResponsesIsMutable();
responses_.addAll(other.responses_);
}
onChanged();
}
} else {
if (!other.responses_.isEmpty()) {
if (responsesBuilder_.isEmpty()) {
responsesBuilder_.dispose();
responsesBuilder_ = null;
responses_ = other.responses_;
bitField0_ = (bitField0_ & ~0x00000001);
responsesBuilder_ =
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getResponsesFieldBuilder() : null;
} else {
responsesBuilder_.addAllMessages(other.responses_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
for (int i = 0; i < getResponsesCount(); i++) {
if (!getResponses(i).isInitialized()) {
return false;
}
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto m =
input.readMessage(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.PARSER,
extensionRegistry);
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.add(m);
} else {
responsesBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto> responses_ =
java.util.Collections.emptyList();
private void ensureResponsesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
responses_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto>(responses_);
bitField0_ |= 0x00000001;
}
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder> responsesBuilder_;
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto> getResponsesList() {
if (responsesBuilder_ == null) {
return java.util.Collections.unmodifiableList(responses_);
} else {
return responsesBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public int getResponsesCount() {
if (responsesBuilder_ == null) {
return responses_.size();
} else {
return responsesBuilder_.getCount();
}
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto getResponses(int index) {
if (responsesBuilder_ == null) {
return responses_.get(index);
} else {
return responsesBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder setResponses(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto value) {
if (responsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResponsesIsMutable();
responses_.set(index, value);
onChanged();
} else {
responsesBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder setResponses(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder builderForValue) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.set(index, builderForValue.build());
onChanged();
} else {
responsesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder addResponses(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto value) {
if (responsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResponsesIsMutable();
responses_.add(value);
onChanged();
} else {
responsesBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder addResponses(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto value) {
if (responsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResponsesIsMutable();
responses_.add(index, value);
onChanged();
} else {
responsesBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder addResponses(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder builderForValue) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.add(builderForValue.build());
onChanged();
} else {
responsesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder addResponses(
int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder builderForValue) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.add(index, builderForValue.build());
onChanged();
} else {
responsesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder addAllResponses(
java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto> values) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, responses_);
onChanged();
} else {
responsesBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder clearResponses() {
if (responsesBuilder_ == null) {
responses_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
responsesBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public Builder removeResponses(int index) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.remove(index);
onChanged();
} else {
responsesBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder getResponsesBuilder(
int index) {
return getResponsesFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder getResponsesOrBuilder(
int index) {
if (responsesBuilder_ == null) {
return responses_.get(index); } else {
return responsesBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder>
getResponsesOrBuilderList() {
if (responsesBuilder_ != null) {
return responsesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(responses_);
}
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder addResponsesBuilder() {
return getResponsesFieldBuilder().addBuilder(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder addResponsesBuilder(
int index) {
return getResponsesFieldBuilder().addBuilder(
index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.AddErasureCodingPolicyResponseProto responses = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder>
getResponsesBuilderList() {
return getResponsesFieldBuilder().getBuilderList();
}
private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder>
getResponsesFieldBuilder() {
if (responsesBuilder_ == null) {
responsesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProtoOrBuilder>(
responses_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
responses_ = null;
}
return responsesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.AddErasureCodingPoliciesResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.AddErasureCodingPoliciesResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AddErasureCodingPoliciesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AddErasureCodingPoliciesResponseProto>() {
@java.lang.Override
public AddErasureCodingPoliciesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<AddErasureCodingPoliciesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<AddErasureCodingPoliciesResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.AddErasureCodingPoliciesResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
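// Usage sketch (not part of the generated API): parsing a serialized
// AddErasureCodingPoliciesResponseProto. Assumes `wire` holds bytes
// previously produced by toByteArray(); parser() is preferred over the
// deprecated PARSER field, though both return the same singleton.
//
//   byte[] wire = ...; // serialized response obtained elsewhere
//   org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//       .AddErasureCodingPoliciesResponseProto resp =
//           org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//               .AddErasureCodingPoliciesResponseProto.parseFrom(wire);
//   for (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos
//            .AddErasureCodingPolicyResponseProto r : resp.getResponsesList()) {
//     // inspect each per-policy add result
//   }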
public interface RemoveErasureCodingPolicyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.RemoveErasureCodingPolicyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
boolean hasEcPolicyName();
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
java.lang.String getEcPolicyName();
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes();
}
/**
* Protobuf type {@code hadoop.hdfs.RemoveErasureCodingPolicyRequestProto}
*/
public static final class RemoveErasureCodingPolicyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.RemoveErasureCodingPolicyRequestProto)
RemoveErasureCodingPolicyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RemoveErasureCodingPolicyRequestProto.newBuilder() to construct.
private RemoveErasureCodingPolicyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RemoveErasureCodingPolicyRequestProto() {
ecPolicyName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RemoveErasureCodingPolicyRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto.Builder.class);
}
private int bitField0_;
public static final int ECPOLICYNAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object ecPolicyName_ = "";
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
@java.lang.Override
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
@java.lang.Override
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
}
}
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
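// Note on the two accessors above: ecPolicyName_ is a plain Object that may
// hold either a String or a ByteString. getEcPolicyName() decodes lazily and
// caches the String back into the field only when the bytes are valid UTF-8;
// getEcPolicyNameBytes() always caches the encoded ByteString. Because the
// field is volatile, the cached value publishes safely across threads.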
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasEcPolicyName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, ecPolicyName_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, ecPolicyName_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto) obj;
if (hasEcPolicyName() != other.hasEcPolicyName()) return false;
if (hasEcPolicyName()) {
if (!getEcPolicyName()
.equals(other.getEcPolicyName())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasEcPolicyName()) {
hash = (37 * hash) + ECPOLICYNAME_FIELD_NUMBER;
hash = (53 * hash) + getEcPolicyName().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
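// The hash above is computed once and memoized, which is safe because the
// message is immutable after construction. The seed and multipliers
// (41/19/37/53/29) follow the scheme used throughout protobuf-generated
// code, so equal messages of the same type always hash equally.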
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.RemoveErasureCodingPolicyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.RemoveErasureCodingPolicyRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
ecPolicyName_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.ecPolicyName_ = ecPolicyName_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto.getDefaultInstance()) return this;
if (other.hasEcPolicyName()) {
ecPolicyName_ = other.ecPolicyName_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasEcPolicyName()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
ecPolicyName_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
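// Wire-format note for the switch in mergeFrom above: a tag is
// (field_number << 3) | wire_type. Field 1 with wire type 2
// (length-delimited) therefore yields (1 << 3) | 2 == 10, which is why
// case 10 reads ecPolicyName. readTag() returns 0 only at end of input,
// so case 0 terminates the loop.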
private int bitField0_;
private java.lang.Object ecPolicyName_ = "";
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string ecPolicyName = 1;
* @param value The ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string ecPolicyName = 1;
* @return This builder for chaining.
*/
public Builder clearEcPolicyName() {
ecPolicyName_ = getDefaultInstance().getEcPolicyName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string ecPolicyName = 1;
* @param value The bytes for ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.RemoveErasureCodingPolicyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.RemoveErasureCodingPolicyRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RemoveErasureCodingPolicyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RemoveErasureCodingPolicyRequestProto>() {
@java.lang.Override
public RemoveErasureCodingPolicyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RemoveErasureCodingPolicyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RemoveErasureCodingPolicyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
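// Usage sketch: building a RemoveErasureCodingPolicyRequestProto. The policy
// name below is illustrative, not taken from this file. Because ecPolicyName
// is a required field, build() throws UninitializedMessageException if it was
// never set; buildPartial() skips that check.
//
//   org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//       .RemoveErasureCodingPolicyRequestProto req =
//           org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//               .RemoveErasureCodingPolicyRequestProto.newBuilder()
//               .setEcPolicyName("RS-6-3-1024k") // illustrative policy name
//               .build();
//   byte[] wire = req.toByteArray();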
public interface RemoveErasureCodingPolicyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.RemoveErasureCodingPolicyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.hdfs.RemoveErasureCodingPolicyResponseProto}
*/
public static final class RemoveErasureCodingPolicyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.RemoveErasureCodingPolicyResponseProto)
RemoveErasureCodingPolicyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use RemoveErasureCodingPolicyResponseProto.newBuilder() to construct.
private RemoveErasureCodingPolicyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RemoveErasureCodingPolicyResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new RemoveErasureCodingPolicyResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.RemoveErasureCodingPolicyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.RemoveErasureCodingPolicyResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.RemoveErasureCodingPolicyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.RemoveErasureCodingPolicyResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RemoveErasureCodingPolicyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RemoveErasureCodingPolicyResponseProto>() {
@java.lang.Override
public RemoveErasureCodingPolicyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<RemoveErasureCodingPolicyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RemoveErasureCodingPolicyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.RemoveErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
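// RemoveErasureCodingPolicyResponseProto declares no fields: it exists purely
// as an RPC acknowledgement. Serialization writes only unknown fields
// (normally nothing), equals()/hashCode() compare only those, and the shared
// default instance can serve as the canonical empty reply, e.g.:
//
//   org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//       .RemoveErasureCodingPolicyResponseProto ack =
//           org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//               .RemoveErasureCodingPolicyResponseProto.getDefaultInstance();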
public interface EnableErasureCodingPolicyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.EnableErasureCodingPolicyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
boolean hasEcPolicyName();
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
java.lang.String getEcPolicyName();
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes();
}
/**
* Protobuf type {@code hadoop.hdfs.EnableErasureCodingPolicyRequestProto}
*/
public static final class EnableErasureCodingPolicyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.EnableErasureCodingPolicyRequestProto)
EnableErasureCodingPolicyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use EnableErasureCodingPolicyRequestProto.newBuilder() to construct.
private EnableErasureCodingPolicyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EnableErasureCodingPolicyRequestProto() {
ecPolicyName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new EnableErasureCodingPolicyRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto.Builder.class);
}
private int bitField0_;
public static final int ECPOLICYNAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object ecPolicyName_ = "";
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
@java.lang.Override
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
@java.lang.Override
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
}
}
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasEcPolicyName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, ecPolicyName_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, ecPolicyName_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto) obj;
if (hasEcPolicyName() != other.hasEcPolicyName()) return false;
if (hasEcPolicyName()) {
if (!getEcPolicyName()
.equals(other.getEcPolicyName())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasEcPolicyName()) {
hash = (37 * hash) + ECPOLICYNAME_FIELD_NUMBER;
hash = (53 * hash) + getEcPolicyName().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.EnableErasureCodingPolicyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.EnableErasureCodingPolicyRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
ecPolicyName_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.ecPolicyName_ = ecPolicyName_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto.getDefaultInstance()) return this;
if (other.hasEcPolicyName()) {
ecPolicyName_ = other.ecPolicyName_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasEcPolicyName()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
ecPolicyName_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object ecPolicyName_ = "";
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string ecPolicyName = 1;
* @param value The ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string ecPolicyName = 1;
* @return This builder for chaining.
*/
public Builder clearEcPolicyName() {
ecPolicyName_ = getDefaultInstance().getEcPolicyName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string ecPolicyName = 1;
* @param value The bytes for ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.EnableErasureCodingPolicyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.EnableErasureCodingPolicyRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<EnableErasureCodingPolicyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<EnableErasureCodingPolicyRequestProto>() {
@java.lang.Override
public EnableErasureCodingPolicyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<EnableErasureCodingPolicyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<EnableErasureCodingPolicyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
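/*
* Illustrative usage sketch, not generated output: round-tripping an
* EnableErasureCodingPolicyRequestProto through the builder and the wire
* format. The policy name "RS-6-3-1024k" is only an example value.
*
* EnableErasureCodingPolicyRequestProto req =
* EnableErasureCodingPolicyRequestProto.newBuilder()
* .setEcPolicyName("RS-6-3-1024k")
* .build(); // build() rejects a missing required ecPolicyName
* byte[] wire = req.toByteArray();
* EnableErasureCodingPolicyRequestProto roundTrip =
* EnableErasureCodingPolicyRequestProto.parseFrom(wire);
*/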
public interface EnableErasureCodingPolicyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.EnableErasureCodingPolicyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.hdfs.EnableErasureCodingPolicyResponseProto}
*/
public static final class EnableErasureCodingPolicyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.EnableErasureCodingPolicyResponseProto)
EnableErasureCodingPolicyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use EnableErasureCodingPolicyResponseProto.newBuilder() to construct.
private EnableErasureCodingPolicyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private EnableErasureCodingPolicyResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new EnableErasureCodingPolicyResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.EnableErasureCodingPolicyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.EnableErasureCodingPolicyResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.EnableErasureCodingPolicyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.EnableErasureCodingPolicyResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<EnableErasureCodingPolicyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<EnableErasureCodingPolicyResponseProto>() {
@java.lang.Override
public EnableErasureCodingPolicyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<EnableErasureCodingPolicyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<EnableErasureCodingPolicyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.EnableErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
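/*
* Illustrative sketch, not generated output: the response message declares no
* fields, so its default instance serializes to zero bytes and any two
* instances without unknown fields are equal.
*
* EnableErasureCodingPolicyResponseProto resp =
* EnableErasureCodingPolicyResponseProto.getDefaultInstance();
* assert resp.getSerializedSize() == 0;
* assert resp.equals(
* EnableErasureCodingPolicyResponseProto.parseFrom(resp.toByteArray()));
*/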
public interface DisableErasureCodingPolicyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.DisableErasureCodingPolicyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
boolean hasEcPolicyName();
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
java.lang.String getEcPolicyName();
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes();
}
/**
* Protobuf type {@code hadoop.hdfs.DisableErasureCodingPolicyRequestProto}
*/
public static final class DisableErasureCodingPolicyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.DisableErasureCodingPolicyRequestProto)
DisableErasureCodingPolicyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DisableErasureCodingPolicyRequestProto.newBuilder() to construct.
private DisableErasureCodingPolicyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DisableErasureCodingPolicyRequestProto() {
ecPolicyName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DisableErasureCodingPolicyRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto.Builder.class);
}
private int bitField0_;
public static final int ECPOLICYNAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object ecPolicyName_ = "";
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
@java.lang.Override
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
@java.lang.Override
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
}
}
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasEcPolicyName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, ecPolicyName_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, ecPolicyName_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto) obj;
if (hasEcPolicyName() != other.hasEcPolicyName()) return false;
if (hasEcPolicyName()) {
if (!getEcPolicyName()
.equals(other.getEcPolicyName())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasEcPolicyName()) {
hash = (37 * hash) + ECPOLICYNAME_FIELD_NUMBER;
hash = (53 * hash) + getEcPolicyName().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.DisableErasureCodingPolicyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.DisableErasureCodingPolicyRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
ecPolicyName_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.ecPolicyName_ = ecPolicyName_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto.getDefaultInstance()) return this;
if (other.hasEcPolicyName()) {
ecPolicyName_ = other.ecPolicyName_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasEcPolicyName()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
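// Tag 10 = (field number 1 << 3) | wire type 2 (length-delimited),
// i.e. the required string field "ecPolicyName".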
ecPolicyName_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object ecPolicyName_ = "";
/**
* required string ecPolicyName = 1;
* @return Whether the ecPolicyName field is set.
*/
public boolean hasEcPolicyName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string ecPolicyName = 1;
* @return The ecPolicyName.
*/
public java.lang.String getEcPolicyName() {
java.lang.Object ref = ecPolicyName_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
ecPolicyName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string ecPolicyName = 1;
* @return The bytes for ecPolicyName.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getEcPolicyNameBytes() {
java.lang.Object ref = ecPolicyName_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
ecPolicyName_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string ecPolicyName = 1;
* @param value The ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string ecPolicyName = 1;
* @return This builder for chaining.
*/
public Builder clearEcPolicyName() {
ecPolicyName_ = getDefaultInstance().getEcPolicyName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string ecPolicyName = 1;
* @param value The bytes for ecPolicyName to set.
* @return This builder for chaining.
*/
public Builder setEcPolicyNameBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ecPolicyName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.DisableErasureCodingPolicyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.DisableErasureCodingPolicyRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DisableErasureCodingPolicyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DisableErasureCodingPolicyRequestProto>() {
@java.lang.Override
public DisableErasureCodingPolicyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DisableErasureCodingPolicyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DisableErasureCodingPolicyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
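/*
* Illustrative sketch, not generated output: ecPolicyName is a required field,
* so build() enforces initialization while buildPartial() skips the check
* (which is how parsePartialFrom can attach a half-built message to an
* InvalidProtocolBufferException). "XOR-2-1-1024k" is only an example name.
*
* DisableErasureCodingPolicyRequestProto.Builder b =
* DisableErasureCodingPolicyRequestProto.newBuilder();
* assert !b.buildPartial().isInitialized(); // ecPolicyName still unset
* // b.build() would throw an UninitializedMessageException here
* b.setEcPolicyName("XOR-2-1-1024k");
* assert b.build().isInitialized();
*/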
public interface DisableErasureCodingPolicyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.DisableErasureCodingPolicyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.hdfs.DisableErasureCodingPolicyResponseProto}
*/
public static final class DisableErasureCodingPolicyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.DisableErasureCodingPolicyResponseProto)
DisableErasureCodingPolicyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DisableErasureCodingPolicyResponseProto.newBuilder() to construct.
private DisableErasureCodingPolicyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DisableErasureCodingPolicyResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DisableErasureCodingPolicyResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.DisableErasureCodingPolicyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.DisableErasureCodingPolicyResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.DisableErasureCodingPolicyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.DisableErasureCodingPolicyResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DisableErasureCodingPolicyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DisableErasureCodingPolicyResponseProto>() {
@java.lang.Override
public DisableErasureCodingPolicyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<DisableErasureCodingPolicyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<DisableErasureCodingPolicyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.DisableErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
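/*
* Illustrative sketch, not generated output: the parseDelimitedFrom overloads
* pair with MessageLite.writeDelimitedTo to frame several messages on one
* stream, each prefixed with its varint-encoded length.
*
* java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
* DisableErasureCodingPolicyResponseProto.getDefaultInstance().writeDelimitedTo(out);
* DisableErasureCodingPolicyResponseProto.getDefaultInstance().writeDelimitedTo(out);
* java.io.InputStream in =
* new java.io.ByteArrayInputStream(out.toByteArray());
* DisableErasureCodingPolicyResponseProto first =
* DisableErasureCodingPolicyResponseProto.parseDelimitedFrom(in);
* DisableErasureCodingPolicyResponseProto second =
* DisableErasureCodingPolicyResponseProto.parseDelimitedFrom(in);
*/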
public interface UnsetErasureCodingPolicyRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.UnsetErasureCodingPolicyRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string src = 1;
* @return Whether the src field is set.
*/
boolean hasSrc();
/**
* required string src = 1;
* @return The src.
*/
java.lang.String getSrc();
/**
* required string src = 1;
* @return The bytes for src.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes();
}
/**
* Protobuf type {@code hadoop.hdfs.UnsetErasureCodingPolicyRequestProto}
*/
public static final class UnsetErasureCodingPolicyRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.UnsetErasureCodingPolicyRequestProto)
UnsetErasureCodingPolicyRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UnsetErasureCodingPolicyRequestProto.newBuilder() to construct.
private UnsetErasureCodingPolicyRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UnsetErasureCodingPolicyRequestProto() {
src_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UnsetErasureCodingPolicyRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto.Builder.class);
}
private int bitField0_;
public static final int SRC_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object src_ = "";
/**
* required string src = 1;
* @return Whether the src field is set.
*/
@java.lang.Override
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string src = 1;
* @return The src.
*/
@java.lang.Override
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
}
}
/**
* required string src = 1;
* @return The bytes for src.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
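// Caches the result of isInitialized():
// -1 = not yet computed, 0 = missing required fields, 1 = fully initialized.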
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasSrc()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, src_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, src_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto) obj;
if (hasSrc() != other.hasSrc()) return false;
if (hasSrc()) {
if (!getSrc()
.equals(other.getSrc())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSrc()) {
hash = (37 * hash) + SRC_FIELD_NUMBER;
hash = (53 * hash) + getSrc().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.UnsetErasureCodingPolicyRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.UnsetErasureCodingPolicyRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
src_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.src_ = src_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto.getDefaultInstance()) return this;
if (other.hasSrc()) {
src_ = other.src_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasSrc()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
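// tag 10 == (field number 1 << 3) | wire type 2 (length-delimited)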
src_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object src_ = "";
/**
* required string src = 1;
* @return Whether the src field is set.
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string src = 1;
* @return The src.
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string src = 1;
* @return The bytes for src.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string src = 1;
* @param value The src to set.
* @return This builder for chaining.
*/
public Builder setSrc(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
src_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string src = 1;
* @return This builder for chaining.
*/
public Builder clearSrc() {
src_ = getDefaultInstance().getSrc();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string src = 1;
* @param value The bytes for src to set.
* @return This builder for chaining.
*/
public Builder setSrcBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
src_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.UnsetErasureCodingPolicyRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.UnsetErasureCodingPolicyRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UnsetErasureCodingPolicyRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UnsetErasureCodingPolicyRequestProto>() {
@java.lang.Override
public UnsetErasureCodingPolicyRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UnsetErasureCodingPolicyRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UnsetErasureCodingPolicyRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
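/*
 * Usage sketch (illustrative only, not part of the generated file): building
 * an UnsetErasureCodingPolicyRequestProto and round-tripping it through its
 * wire form. The path "/ec/dir" is a hypothetical example value.
 *
 *   UnsetErasureCodingPolicyRequestProto req =
 *       UnsetErasureCodingPolicyRequestProto.newBuilder()
 *           .setSrc("/ec/dir")  // required; build() throws if src is unset
 *           .build();
 *   byte[] bytes = req.toByteArray();
 *   UnsetErasureCodingPolicyRequestProto parsed =
 *       UnsetErasureCodingPolicyRequestProto.parseFrom(bytes);
 */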
public interface UnsetErasureCodingPolicyResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.UnsetErasureCodingPolicyResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.hdfs.UnsetErasureCodingPolicyResponseProto}
*/
public static final class UnsetErasureCodingPolicyResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.UnsetErasureCodingPolicyResponseProto)
UnsetErasureCodingPolicyResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UnsetErasureCodingPolicyResponseProto.newBuilder() to construct.
private UnsetErasureCodingPolicyResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UnsetErasureCodingPolicyResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UnsetErasureCodingPolicyResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.UnsetErasureCodingPolicyResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.UnsetErasureCodingPolicyResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.UnsetErasureCodingPolicyResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.UnsetErasureCodingPolicyResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<UnsetErasureCodingPolicyResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<UnsetErasureCodingPolicyResponseProto>() {
@java.lang.Override
public UnsetErasureCodingPolicyResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<UnsetErasureCodingPolicyResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<UnsetErasureCodingPolicyResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
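/*
 * Sketch (illustrative): UnsetErasureCodingPolicyResponseProto declares no
 * fields, so callers normally use the shared default instance; an instance
 * without unknown fields serializes to zero bytes.
 *
 *   UnsetErasureCodingPolicyResponseProto resp =
 *       UnsetErasureCodingPolicyResponseProto.getDefaultInstance();
 */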
public interface GetECTopologyResultForPoliciesRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetECTopologyResultForPoliciesRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string policies = 1;
* @return A list containing the policies.
*/
java.util.List<java.lang.String>
getPoliciesList();
/**
* repeated string policies = 1;
* @return The count of policies.
*/
int getPoliciesCount();
/**
* repeated string policies = 1;
* @param index The index of the element to return.
* @return The policies at the given index.
*/
java.lang.String getPolicies(int index);
/**
* repeated string policies = 1;
* @param index The index of the value to return.
* @return The bytes of the policies at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getPoliciesBytes(int index);
}
/**
* Protobuf type {@code hadoop.hdfs.GetECTopologyResultForPoliciesRequestProto}
*/
public static final class GetECTopologyResultForPoliciesRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetECTopologyResultForPoliciesRequestProto)
GetECTopologyResultForPoliciesRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetECTopologyResultForPoliciesRequestProto.newBuilder() to construct.
private GetECTopologyResultForPoliciesRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetECTopologyResultForPoliciesRequestProto() {
policies_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetECTopologyResultForPoliciesRequestProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto.Builder.class);
}
public static final int POLICIES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringList policies_;
/**
* repeated string policies = 1;
* @return A list containing the policies.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getPoliciesList() {
return policies_;
}
/**
* repeated string policies = 1;
* @return The count of policies.
*/
public int getPoliciesCount() {
return policies_.size();
}
/**
* repeated string policies = 1;
* @param index The index of the element to return.
* @return The policies at the given index.
*/
public java.lang.String getPolicies(int index) {
return policies_.get(index);
}
/**
* repeated string policies = 1;
* @param index The index of the value to return.
* @return The bytes of the policies at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getPoliciesBytes(int index) {
return policies_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < policies_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, policies_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < policies_.size(); i++) {
dataSize += computeStringSizeNoTag(policies_.getRaw(i));
}
size += dataSize;
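// plus a one-byte tag (field 1, wire type 2) per repeated entry: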
size += 1 * getPoliciesList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto) obj;
if (!getPoliciesList()
.equals(other.getPoliciesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getPoliciesCount() > 0) {
hash = (37 * hash) + POLICIES_FIELD_NUMBER;
hash = (53 * hash) + getPoliciesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetECTopologyResultForPoliciesRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetECTopologyResultForPoliciesRequestProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
policies_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto result) {
if (((bitField0_ & 0x00000001) != 0)) {
policies_ = policies_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.policies_ = policies_;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto.getDefaultInstance()) return this;
if (!other.policies_.isEmpty()) {
if (policies_.isEmpty()) {
policies_ = other.policies_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePoliciesIsMutable();
policies_.addAll(other.policies_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensurePoliciesIsMutable();
policies_.add(bs);
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.thirdparty.protobuf.LazyStringList policies_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
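// Bit 0x00000001 of bitField0_ records whether policies_ is a private mutable
// copy; ensurePoliciesIsMutable() copies on first write, so until then the
// list may be shared with a message merged via mergeFrom().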
private void ensurePoliciesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
policies_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(policies_);
bitField0_ |= 0x00000001;
}
}
/**
* repeated string policies = 1;
* @return A list containing the policies.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getPoliciesList() {
return policies_.getUnmodifiableView();
}
/**
* repeated string policies = 1;
* @return The count of policies.
*/
public int getPoliciesCount() {
return policies_.size();
}
/**
* repeated string policies = 1;
* @param index The index of the element to return.
* @return The policies at the given index.
*/
public java.lang.String getPolicies(int index) {
return policies_.get(index);
}
/**
* repeated string policies = 1;
* @param index The index of the value to return.
* @return The bytes of the policies at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getPoliciesBytes(int index) {
return policies_.getByteString(index);
}
/**
* repeated string policies = 1;
* @param index The index to set the value at.
* @param value The policies to set.
* @return This builder for chaining.
*/
public Builder setPolicies(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensurePoliciesIsMutable();
policies_.set(index, value);
onChanged();
return this;
}
/**
* repeated string policies = 1;
* @param value The policies to add.
* @return This builder for chaining.
*/
public Builder addPolicies(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensurePoliciesIsMutable();
policies_.add(value);
onChanged();
return this;
}
/**
* repeated string policies = 1;
* @param values The policies to add.
* @return This builder for chaining.
*/
public Builder addAllPolicies(
java.lang.Iterable<java.lang.String> values) {
ensurePoliciesIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, policies_);
onChanged();
return this;
}
/**
* repeated string policies = 1;
* @return This builder for chaining.
*/
public Builder clearPolicies() {
policies_ = org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* repeated string policies = 1;
* @param value The bytes of the policies to add.
* @return This builder for chaining.
*/
public Builder addPoliciesBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensurePoliciesIsMutable();
policies_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetECTopologyResultForPoliciesRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetECTopologyResultForPoliciesRequestProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetECTopologyResultForPoliciesRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetECTopologyResultForPoliciesRequestProto>() {
@java.lang.Override
public GetECTopologyResultForPoliciesRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetECTopologyResultForPoliciesRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetECTopologyResultForPoliciesRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
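/*
 * Usage sketch (illustrative): populating the repeated policies field. The
 * policy names are hypothetical example values.
 *
 *   GetECTopologyResultForPoliciesRequestProto req =
 *       GetECTopologyResultForPoliciesRequestProto.newBuilder()
 *           .addPolicies("RS-6-3-1024k")
 *           .addAllPolicies(java.util.Arrays.asList(
 *               "RS-3-2-1024k", "XOR-2-1-1024k"))
 *           .build();
 */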
public interface GetECTopologyResultForPoliciesResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.GetECTopologyResultForPoliciesResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
* @return Whether the response field is set.
*/
boolean hasResponse();
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
* @return The response.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto getResponse();
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProtoOrBuilder getResponseOrBuilder();
}
/**
* Protobuf type {@code hadoop.hdfs.GetECTopologyResultForPoliciesResponseProto}
*/
public static final class GetECTopologyResultForPoliciesResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.GetECTopologyResultForPoliciesResponseProto)
GetECTopologyResultForPoliciesResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetECTopologyResultForPoliciesResponseProto.newBuilder() to construct.
private GetECTopologyResultForPoliciesResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetECTopologyResultForPoliciesResponseProto() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetECTopologyResultForPoliciesResponseProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto.Builder.class);
}
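// bitField0_ tracks proto2 field presence: bit 0x00000001 is set once the
// required "response" field (field number 1) has been assigned.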
private int bitField0_;
public static final int RESPONSE_FIELD_NUMBER = 1;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto response_;
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
* @return Whether the response field is set.
*/
@java.lang.Override
public boolean hasResponse() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
* @return The response.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto getResponse() {
return response_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.getDefaultInstance() : response_;
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProtoOrBuilder getResponseOrBuilder() {
return response_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.getDefaultInstance() : response_;
}
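// memoizedIsInitialized caches the result of isInitialized():
// -1 = not yet computed, 0 = missing required fields, 1 = fully initialized.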
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasResponse()) {
memoizedIsInitialized = 0;
return false;
}
if (!getResponse().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getResponse());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getResponse());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto) obj;
if (hasResponse() != other.hasResponse()) return false;
if (hasResponse()) {
if (!getResponse()
.equals(other.getResponse())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasResponse()) {
hash = (37 * hash) + RESPONSE_FIELD_NUMBER;
hash = (53 * hash) + getResponse().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetECTopologyResultForPoliciesResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.GetECTopologyResultForPoliciesResponseProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getResponseFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
response_ = null;
if (responseBuilder_ != null) {
responseBuilder_.dispose();
responseBuilder_ = null;
}
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
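// Unlike build(), buildPartial() does not verify required fields; it simply
// copies whichever presence bits are set in bitField0_ into the result.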
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.response_ = responseBuilder_ == null
? response_
: responseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto.getDefaultInstance()) return this;
if (other.hasResponse()) {
mergeResponse(other.getResponse());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasResponse()) {
return false;
}
if (!getResponse().isInitialized()) {
return false;
}
return true;
}
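// Wire-format parsing loop: a protobuf tag is (fieldNumber << 3) | wireType,
// so tag 10 below is field 1 with wire type 2 (length-delimited message).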
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getResponseFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto response_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProtoOrBuilder> responseBuilder_;
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
* @return Whether the response field is set.
*/
public boolean hasResponse() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
* @return The response.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto getResponse() {
if (responseBuilder_ == null) {
return response_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.getDefaultInstance() : response_;
} else {
return responseBuilder_.getMessage();
}
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
public Builder setResponse(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto value) {
if (responseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
response_ = value;
} else {
responseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
public Builder setResponse(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.Builder builderForValue) {
if (responseBuilder_ == null) {
response_ = builderForValue.build();
} else {
responseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
public Builder mergeResponse(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto value) {
if (responseBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
response_ != null &&
response_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.getDefaultInstance()) {
getResponseBuilder().mergeFrom(value);
} else {
response_ = value;
}
} else {
responseBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
public Builder clearResponse() {
bitField0_ = (bitField0_ & ~0x00000001);
response_ = null;
if (responseBuilder_ != null) {
responseBuilder_.dispose();
responseBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.Builder getResponseBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getResponseFieldBuilder().getBuilder();
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProtoOrBuilder getResponseOrBuilder() {
if (responseBuilder_ != null) {
return responseBuilder_.getMessageOrBuilder();
} else {
return response_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.getDefaultInstance() : response_;
}
}
/**
* required .hadoop.hdfs.ECTopologyVerifierResultProto response = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProtoOrBuilder>
getResponseFieldBuilder() {
if (responseBuilder_ == null) {
responseBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ECTopologyVerifierResultProtoOrBuilder>(
getResponse(),
getParentForChildren(),
isClean());
response_ = null;
}
return responseBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetECTopologyResultForPoliciesResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetECTopologyResultForPoliciesResponseProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetECTopologyResultForPoliciesResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetECTopologyResultForPoliciesResponseProto>() {
@java.lang.Override
public GetECTopologyResultForPoliciesResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetECTopologyResultForPoliciesResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetECTopologyResultForPoliciesResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetECTopologyResultForPoliciesResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface BlockECReconstructionInfoProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.BlockECReconstructionInfoProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
* @return Whether the block field is set.
*/
boolean hasBlock();
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
* @return The block.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getBlock();
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder getBlockOrBuilder();
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
* @return Whether the sourceDnInfos field is set.
*/
boolean hasSourceDnInfos();
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
* @return The sourceDnInfos.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto getSourceDnInfos();
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder getSourceDnInfosOrBuilder();
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
* @return Whether the targetDnInfos field is set.
*/
boolean hasTargetDnInfos();
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
* @return The targetDnInfos.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto getTargetDnInfos();
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder getTargetDnInfosOrBuilder();
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
* @return Whether the targetStorageUuids field is set.
*/
boolean hasTargetStorageUuids();
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
* @return The targetStorageUuids.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto getTargetStorageUuids();
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProtoOrBuilder getTargetStorageUuidsOrBuilder();
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
* @return Whether the targetStorageTypes field is set.
*/
boolean hasTargetStorageTypes();
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
* @return The targetStorageTypes.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto getTargetStorageTypes();
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProtoOrBuilder getTargetStorageTypesOrBuilder();
/**
* required bytes liveBlockIndices = 6;
* @return Whether the liveBlockIndices field is set.
*/
boolean hasLiveBlockIndices();
/**
* required bytes liveBlockIndices = 6;
* @return The liveBlockIndices.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getLiveBlockIndices();
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
* @return Whether the ecPolicy field is set.
*/
boolean hasEcPolicy();
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
* @return The ecPolicy.
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicy();
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPolicyOrBuilder();
/**
* optional bytes excludeReconstructedIndices = 8;
* @return Whether the excludeReconstructedIndices field is set.
*/
boolean hasExcludeReconstructedIndices();
/**
* optional bytes excludeReconstructedIndices = 8;
* @return The excludeReconstructedIndices.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString getExcludeReconstructedIndices();
}
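// Hedged construction sketch for the message below: every variable on the
// right-hand side is illustrative, standing in for a previously built
// HdfsProtos message or a ByteString; the setters are the generated API, and
// all seven required fields must be set before build() succeeds:
//
//   BlockECReconstructionInfoProto info = BlockECReconstructionInfoProto.newBuilder()
//       .setBlock(extendedBlock)
//       .setSourceDnInfos(sourceDns)
//       .setTargetDnInfos(targetDns)
//       .setTargetStorageUuids(storageUuids)
//       .setTargetStorageTypes(storageTypes)
//       .setLiveBlockIndices(liveIndices)   // ByteString of live block indices
//       .setEcPolicy(ecPolicy)
//       .build();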
/**
* Block erasure coding reconstruction info
*
* Protobuf type {@code hadoop.hdfs.BlockECReconstructionInfoProto}
*/
public static final class BlockECReconstructionInfoProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.BlockECReconstructionInfoProto)
BlockECReconstructionInfoProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use BlockECReconstructionInfoProto.newBuilder() to construct.
private BlockECReconstructionInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BlockECReconstructionInfoProto() {
liveBlockIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
excludeReconstructedIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BlockECReconstructionInfoProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto.Builder.class);
}
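// Presence bits for the eight fields of this message: 0x01 = block,
// 0x02 = sourceDnInfos, 0x04 = targetDnInfos, 0x08 = targetStorageUuids,
// 0x10 = targetStorageTypes, 0x20 = liveBlockIndices, 0x40 = ecPolicy,
// 0x80 = excludeReconstructedIndices.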
private int bitField0_;
public static final int BLOCK_FIELD_NUMBER = 1;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto block_;
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
* @return Whether the block field is set.
*/
@java.lang.Override
public boolean hasBlock() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
* @return The block.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getBlock() {
return block_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance() : block_;
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder getBlockOrBuilder() {
return block_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance() : block_;
}
public static final int SOURCEDNINFOS_FIELD_NUMBER = 2;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto sourceDnInfos_;
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
* @return Whether the sourceDnInfos field is set.
*/
@java.lang.Override
public boolean hasSourceDnInfos() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
* @return The sourceDnInfos.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto getSourceDnInfos() {
return sourceDnInfos_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : sourceDnInfos_;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder getSourceDnInfosOrBuilder() {
return sourceDnInfos_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : sourceDnInfos_;
}
public static final int TARGETDNINFOS_FIELD_NUMBER = 3;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto targetDnInfos_;
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
* @return Whether the targetDnInfos field is set.
*/
@java.lang.Override
public boolean hasTargetDnInfos() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
* @return The targetDnInfos.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto getTargetDnInfos() {
return targetDnInfos_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : targetDnInfos_;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder getTargetDnInfosOrBuilder() {
return targetDnInfos_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : targetDnInfos_;
}
public static final int TARGETSTORAGEUUIDS_FIELD_NUMBER = 4;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto targetStorageUuids_;
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
* @return Whether the targetStorageUuids field is set.
*/
@java.lang.Override
public boolean hasTargetStorageUuids() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
* @return The targetStorageUuids.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto getTargetStorageUuids() {
return targetStorageUuids_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.getDefaultInstance() : targetStorageUuids_;
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProtoOrBuilder getTargetStorageUuidsOrBuilder() {
return targetStorageUuids_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.getDefaultInstance() : targetStorageUuids_;
}
public static final int TARGETSTORAGETYPES_FIELD_NUMBER = 5;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto targetStorageTypes_;
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
* @return Whether the targetStorageTypes field is set.
*/
@java.lang.Override
public boolean hasTargetStorageTypes() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
* @return The targetStorageTypes.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto getTargetStorageTypes() {
return targetStorageTypes_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.getDefaultInstance() : targetStorageTypes_;
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProtoOrBuilder getTargetStorageTypesOrBuilder() {
return targetStorageTypes_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.getDefaultInstance() : targetStorageTypes_;
}
public static final int LIVEBLOCKINDICES_FIELD_NUMBER = 6;
private org.apache.hadoop.thirdparty.protobuf.ByteString liveBlockIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* required bytes liveBlockIndices = 6;
* @return Whether the liveBlockIndices field is set.
*/
@java.lang.Override
public boolean hasLiveBlockIndices() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* required bytes liveBlockIndices = 6;
* @return The liveBlockIndices.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getLiveBlockIndices() {
return liveBlockIndices_;
}
public static final int ECPOLICY_FIELD_NUMBER = 7;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto ecPolicy_;
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
* @return Whether the ecPolicy field is set.
*/
@java.lang.Override
public boolean hasEcPolicy() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
* @return The ecPolicy.
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicy() {
return ecPolicy_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPolicyOrBuilder() {
return ecPolicy_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
}
public static final int EXCLUDERECONSTRUCTEDINDICES_FIELD_NUMBER = 8;
private org.apache.hadoop.thirdparty.protobuf.ByteString excludeReconstructedIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes excludeReconstructedIndices = 8;
* @return Whether the excludeReconstructedIndices field is set.
*/
@java.lang.Override
public boolean hasExcludeReconstructedIndices() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional bytes excludeReconstructedIndices = 8;
* @return The excludeReconstructedIndices.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getExcludeReconstructedIndices() {
return excludeReconstructedIndices_;
}
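// isInitialized() below checks the seven required fields (1-7) and then
// recurses into the message fields that themselves carry required fields
// (block, sourceDnInfos, targetDnInfos, ecPolicy); field 8
// (excludeReconstructedIndices) is optional and therefore not checked.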
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasBlock()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasSourceDnInfos()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasTargetDnInfos()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasTargetStorageUuids()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasTargetStorageTypes()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasLiveBlockIndices()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasEcPolicy()) {
memoizedIsInitialized = 0;
return false;
}
if (!getBlock().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getSourceDnInfos().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getTargetDnInfos().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getEcPolicy().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getBlock());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getSourceDnInfos());
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(3, getTargetDnInfos());
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(4, getTargetStorageUuids());
}
if (((bitField0_ & 0x00000010) != 0)) {
output.writeMessage(5, getTargetStorageTypes());
}
if (((bitField0_ & 0x00000020) != 0)) {
output.writeBytes(6, liveBlockIndices_);
}
if (((bitField0_ & 0x00000040) != 0)) {
output.writeMessage(7, getEcPolicy());
}
if (((bitField0_ & 0x00000080) != 0)) {
output.writeBytes(8, excludeReconstructedIndices_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(1, getBlock());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(2, getSourceDnInfos());
}
if (((bitField0_ & 0x00000004) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(3, getTargetDnInfos());
}
if (((bitField0_ & 0x00000008) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(4, getTargetStorageUuids());
}
if (((bitField0_ & 0x00000010) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(5, getTargetStorageTypes());
}
if (((bitField0_ & 0x00000020) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(6, liveBlockIndices_);
}
if (((bitField0_ & 0x00000040) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeMessageSize(7, getEcPolicy());
}
if (((bitField0_ & 0x00000080) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
.computeBytesSize(8, excludeReconstructedIndices_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto) obj;
if (hasBlock() != other.hasBlock()) return false;
if (hasBlock()) {
if (!getBlock()
.equals(other.getBlock())) return false;
}
if (hasSourceDnInfos() != other.hasSourceDnInfos()) return false;
if (hasSourceDnInfos()) {
if (!getSourceDnInfos()
.equals(other.getSourceDnInfos())) return false;
}
if (hasTargetDnInfos() != other.hasTargetDnInfos()) return false;
if (hasTargetDnInfos()) {
if (!getTargetDnInfos()
.equals(other.getTargetDnInfos())) return false;
}
if (hasTargetStorageUuids() != other.hasTargetStorageUuids()) return false;
if (hasTargetStorageUuids()) {
if (!getTargetStorageUuids()
.equals(other.getTargetStorageUuids())) return false;
}
if (hasTargetStorageTypes() != other.hasTargetStorageTypes()) return false;
if (hasTargetStorageTypes()) {
if (!getTargetStorageTypes()
.equals(other.getTargetStorageTypes())) return false;
}
if (hasLiveBlockIndices() != other.hasLiveBlockIndices()) return false;
if (hasLiveBlockIndices()) {
if (!getLiveBlockIndices()
.equals(other.getLiveBlockIndices())) return false;
}
if (hasEcPolicy() != other.hasEcPolicy()) return false;
if (hasEcPolicy()) {
if (!getEcPolicy()
.equals(other.getEcPolicy())) return false;
}
if (hasExcludeReconstructedIndices() != other.hasExcludeReconstructedIndices()) return false;
if (hasExcludeReconstructedIndices()) {
if (!getExcludeReconstructedIndices()
.equals(other.getExcludeReconstructedIndices())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasBlock()) {
hash = (37 * hash) + BLOCK_FIELD_NUMBER;
hash = (53 * hash) + getBlock().hashCode();
}
if (hasSourceDnInfos()) {
hash = (37 * hash) + SOURCEDNINFOS_FIELD_NUMBER;
hash = (53 * hash) + getSourceDnInfos().hashCode();
}
if (hasTargetDnInfos()) {
hash = (37 * hash) + TARGETDNINFOS_FIELD_NUMBER;
hash = (53 * hash) + getTargetDnInfos().hashCode();
}
if (hasTargetStorageUuids()) {
hash = (37 * hash) + TARGETSTORAGEUUIDS_FIELD_NUMBER;
hash = (53 * hash) + getTargetStorageUuids().hashCode();
}
if (hasTargetStorageTypes()) {
hash = (37 * hash) + TARGETSTORAGETYPES_FIELD_NUMBER;
hash = (53 * hash) + getTargetStorageTypes().hashCode();
}
if (hasLiveBlockIndices()) {
hash = (37 * hash) + LIVEBLOCKINDICES_FIELD_NUMBER;
hash = (53 * hash) + getLiveBlockIndices().hashCode();
}
if (hasEcPolicy()) {
hash = (37 * hash) + ECPOLICY_FIELD_NUMBER;
hash = (53 * hash) + getEcPolicy().hashCode();
}
if (hasExcludeReconstructedIndices()) {
hash = (37 * hash) + EXCLUDERECONSTRUCTEDINDICES_FIELD_NUMBER;
hash = (53 * hash) + getExcludeReconstructedIndices().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Block erasure coding reconstruction info
*
* Protobuf type {@code hadoop.hdfs.BlockECReconstructionInfoProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.BlockECReconstructionInfoProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getBlockFieldBuilder();
getSourceDnInfosFieldBuilder();
getTargetDnInfosFieldBuilder();
getTargetStorageUuidsFieldBuilder();
getTargetStorageTypesFieldBuilder();
getEcPolicyFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
block_ = null;
if (blockBuilder_ != null) {
blockBuilder_.dispose();
blockBuilder_ = null;
}
sourceDnInfos_ = null;
if (sourceDnInfosBuilder_ != null) {
sourceDnInfosBuilder_.dispose();
sourceDnInfosBuilder_ = null;
}
targetDnInfos_ = null;
if (targetDnInfosBuilder_ != null) {
targetDnInfosBuilder_.dispose();
targetDnInfosBuilder_ = null;
}
targetStorageUuids_ = null;
if (targetStorageUuidsBuilder_ != null) {
targetStorageUuidsBuilder_.dispose();
targetStorageUuidsBuilder_ = null;
}
targetStorageTypes_ = null;
if (targetStorageTypesBuilder_ != null) {
targetStorageTypesBuilder_.dispose();
targetStorageTypesBuilder_ = null;
}
liveBlockIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
ecPolicy_ = null;
if (ecPolicyBuilder_ != null) {
ecPolicyBuilder_.dispose();
ecPolicyBuilder_ = null;
}
excludeReconstructedIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.block_ = blockBuilder_ == null
? block_
: blockBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.sourceDnInfos_ = sourceDnInfosBuilder_ == null
? sourceDnInfos_
: sourceDnInfosBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.targetDnInfos_ = targetDnInfosBuilder_ == null
? targetDnInfos_
: targetDnInfosBuilder_.build();
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.targetStorageUuids_ = targetStorageUuidsBuilder_ == null
? targetStorageUuids_
: targetStorageUuidsBuilder_.build();
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.targetStorageTypes_ = targetStorageTypesBuilder_ == null
? targetStorageTypes_
: targetStorageTypesBuilder_.build();
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.liveBlockIndices_ = liveBlockIndices_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.ecPolicy_ = ecPolicyBuilder_ == null
? ecPolicy_
: ecPolicyBuilder_.build();
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.excludeReconstructedIndices_ = excludeReconstructedIndices_;
to_bitField0_ |= 0x00000080;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto.getDefaultInstance()) return this;
if (other.hasBlock()) {
mergeBlock(other.getBlock());
}
if (other.hasSourceDnInfos()) {
mergeSourceDnInfos(other.getSourceDnInfos());
}
if (other.hasTargetDnInfos()) {
mergeTargetDnInfos(other.getTargetDnInfos());
}
if (other.hasTargetStorageUuids()) {
mergeTargetStorageUuids(other.getTargetStorageUuids());
}
if (other.hasTargetStorageTypes()) {
mergeTargetStorageTypes(other.getTargetStorageTypes());
}
if (other.hasLiveBlockIndices()) {
setLiveBlockIndices(other.getLiveBlockIndices());
}
if (other.hasEcPolicy()) {
mergeEcPolicy(other.getEcPolicy());
}
if (other.hasExcludeReconstructedIndices()) {
setExcludeReconstructedIndices(other.getExcludeReconstructedIndices());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasBlock()) {
return false;
}
if (!hasSourceDnInfos()) {
return false;
}
if (!hasTargetDnInfos()) {
return false;
}
if (!hasTargetStorageUuids()) {
return false;
}
if (!hasTargetStorageTypes()) {
return false;
}
if (!hasLiveBlockIndices()) {
return false;
}
if (!hasEcPolicy()) {
return false;
}
if (!getBlock().isInitialized()) {
return false;
}
if (!getSourceDnInfos().isInitialized()) {
return false;
}
if (!getTargetDnInfos().isInitialized()) {
return false;
}
if (!getEcPolicy().isInitialized()) {
return false;
}
return true;
}
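// The wire tags handled below map to fields 1-8, each computed as
// (fieldNumber << 3) | wireType: messages and bytes both use wire type 2,
// giving tags 10, 18, 26, 34, 42, 50, 58, and 66.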
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getBlockFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getSourceDnInfosFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
input.readMessage(
getTargetDnInfosFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
case 34: {
input.readMessage(
getTargetStorageUuidsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000008;
break;
} // case 34
case 42: {
input.readMessage(
getTargetStorageTypesFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000010;
break;
} // case 42
case 50: {
liveBlockIndices_ = input.readBytes();
bitField0_ |= 0x00000020;
break;
} // case 50
case 58: {
input.readMessage(
getEcPolicyFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000040;
break;
} // case 58
case 66: {
excludeReconstructedIndices_ = input.readBytes();
bitField0_ |= 0x00000080;
break;
} // case 66
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
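// Note (editorial): bitField0_ below is the generated presence mask. Each bit
// records that a field was explicitly set, matching the wire-parsing cases
// above: 0x01 block, 0x02 sourceDnInfos, 0x04 targetDnInfos,
// 0x08 targetStorageUuids, 0x10 targetStorageTypes, 0x20 liveBlockIndices,
// 0x40 ecPolicy, 0x80 excludeReconstructedIndices.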
private int bitField0_;
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto block_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder> blockBuilder_;
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
* @return Whether the block field is set.
*/
public boolean hasBlock() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
* @return The block.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getBlock() {
if (blockBuilder_ == null) {
return block_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance() : block_;
} else {
return blockBuilder_.getMessage();
}
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
public Builder setBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto value) {
if (blockBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
block_ = value;
} else {
blockBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
public Builder setBlock(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder builderForValue) {
if (blockBuilder_ == null) {
block_ = builderForValue.build();
} else {
blockBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
public Builder mergeBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto value) {
if (blockBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
block_ != null &&
block_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance()) {
getBlockBuilder().mergeFrom(value);
} else {
block_ = value;
}
} else {
blockBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
public Builder clearBlock() {
bitField0_ = (bitField0_ & ~0x00000001);
block_ = null;
if (blockBuilder_ != null) {
blockBuilder_.dispose();
blockBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder getBlockBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getBlockFieldBuilder().getBuilder();
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder getBlockOrBuilder() {
if (blockBuilder_ != null) {
return blockBuilder_.getMessageOrBuilder();
} else {
return block_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance() : block_;
}
}
/**
* required .hadoop.hdfs.ExtendedBlockProto block = 1;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder>
getBlockFieldBuilder() {
if (blockBuilder_ == null) {
blockBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder>(
getBlock(),
getParentForChildren(),
isClean());
block_ = null;
}
return blockBuilder_;
}
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto sourceDnInfos_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder> sourceDnInfosBuilder_;
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
* @return Whether the sourceDnInfos field is set.
*/
public boolean hasSourceDnInfos() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
* @return The sourceDnInfos.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto getSourceDnInfos() {
if (sourceDnInfosBuilder_ == null) {
return sourceDnInfos_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : sourceDnInfos_;
} else {
return sourceDnInfosBuilder_.getMessage();
}
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
public Builder setSourceDnInfos(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto value) {
if (sourceDnInfosBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sourceDnInfos_ = value;
} else {
sourceDnInfosBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
public Builder setSourceDnInfos(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder builderForValue) {
if (sourceDnInfosBuilder_ == null) {
sourceDnInfos_ = builderForValue.build();
} else {
sourceDnInfosBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
public Builder mergeSourceDnInfos(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto value) {
if (sourceDnInfosBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
sourceDnInfos_ != null &&
sourceDnInfos_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance()) {
getSourceDnInfosBuilder().mergeFrom(value);
} else {
sourceDnInfos_ = value;
}
} else {
sourceDnInfosBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
public Builder clearSourceDnInfos() {
bitField0_ = (bitField0_ & ~0x00000002);
sourceDnInfos_ = null;
if (sourceDnInfosBuilder_ != null) {
sourceDnInfosBuilder_.dispose();
sourceDnInfosBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder getSourceDnInfosBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getSourceDnInfosFieldBuilder().getBuilder();
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder getSourceDnInfosOrBuilder() {
if (sourceDnInfosBuilder_ != null) {
return sourceDnInfosBuilder_.getMessageOrBuilder();
} else {
return sourceDnInfos_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : sourceDnInfos_;
}
}
/**
* required .hadoop.hdfs.DatanodeInfosProto sourceDnInfos = 2;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder>
getSourceDnInfosFieldBuilder() {
if (sourceDnInfosBuilder_ == null) {
sourceDnInfosBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder>(
getSourceDnInfos(),
getParentForChildren(),
isClean());
sourceDnInfos_ = null;
}
return sourceDnInfosBuilder_;
}
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto targetDnInfos_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder> targetDnInfosBuilder_;
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
* @return Whether the targetDnInfos field is set.
*/
public boolean hasTargetDnInfos() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
* @return The targetDnInfos.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto getTargetDnInfos() {
if (targetDnInfosBuilder_ == null) {
return targetDnInfos_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : targetDnInfos_;
} else {
return targetDnInfosBuilder_.getMessage();
}
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
public Builder setTargetDnInfos(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto value) {
if (targetDnInfosBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
targetDnInfos_ = value;
} else {
targetDnInfosBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
public Builder setTargetDnInfos(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder builderForValue) {
if (targetDnInfosBuilder_ == null) {
targetDnInfos_ = builderForValue.build();
} else {
targetDnInfosBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
public Builder mergeTargetDnInfos(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto value) {
if (targetDnInfosBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
targetDnInfos_ != null &&
targetDnInfos_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance()) {
getTargetDnInfosBuilder().mergeFrom(value);
} else {
targetDnInfos_ = value;
}
} else {
targetDnInfosBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
public Builder clearTargetDnInfos() {
bitField0_ = (bitField0_ & ~0x00000004);
targetDnInfos_ = null;
if (targetDnInfosBuilder_ != null) {
targetDnInfosBuilder_.dispose();
targetDnInfosBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder getTargetDnInfosBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTargetDnInfosFieldBuilder().getBuilder();
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder getTargetDnInfosOrBuilder() {
if (targetDnInfosBuilder_ != null) {
return targetDnInfosBuilder_.getMessageOrBuilder();
} else {
return targetDnInfos_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance() : targetDnInfos_;
}
}
/**
* required .hadoop.hdfs.DatanodeInfosProto targetDnInfos = 3;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder>
getTargetDnInfosFieldBuilder() {
if (targetDnInfosBuilder_ == null) {
targetDnInfosBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder>(
getTargetDnInfos(),
getParentForChildren(),
isClean());
targetDnInfos_ = null;
}
return targetDnInfosBuilder_;
}
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto targetStorageUuids_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProtoOrBuilder> targetStorageUuidsBuilder_;
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
* @return Whether the targetStorageUuids field is set.
*/
public boolean hasTargetStorageUuids() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
* @return The targetStorageUuids.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto getTargetStorageUuids() {
if (targetStorageUuidsBuilder_ == null) {
return targetStorageUuids_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.getDefaultInstance() : targetStorageUuids_;
} else {
return targetStorageUuidsBuilder_.getMessage();
}
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
public Builder setTargetStorageUuids(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto value) {
if (targetStorageUuidsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
targetStorageUuids_ = value;
} else {
targetStorageUuidsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
public Builder setTargetStorageUuids(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.Builder builderForValue) {
if (targetStorageUuidsBuilder_ == null) {
targetStorageUuids_ = builderForValue.build();
} else {
targetStorageUuidsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
public Builder mergeTargetStorageUuids(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto value) {
if (targetStorageUuidsBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0) &&
targetStorageUuids_ != null &&
targetStorageUuids_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.getDefaultInstance()) {
getTargetStorageUuidsBuilder().mergeFrom(value);
} else {
targetStorageUuids_ = value;
}
} else {
targetStorageUuidsBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
public Builder clearTargetStorageUuids() {
bitField0_ = (bitField0_ & ~0x00000008);
targetStorageUuids_ = null;
if (targetStorageUuidsBuilder_ != null) {
targetStorageUuidsBuilder_.dispose();
targetStorageUuidsBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.Builder getTargetStorageUuidsBuilder() {
bitField0_ |= 0x00000008;
onChanged();
return getTargetStorageUuidsFieldBuilder().getBuilder();
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProtoOrBuilder getTargetStorageUuidsOrBuilder() {
if (targetStorageUuidsBuilder_ != null) {
return targetStorageUuidsBuilder_.getMessageOrBuilder();
} else {
return targetStorageUuids_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.getDefaultInstance() : targetStorageUuids_;
}
}
/**
* required .hadoop.hdfs.StorageUuidsProto targetStorageUuids = 4;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProtoOrBuilder>
getTargetStorageUuidsFieldBuilder() {
if (targetStorageUuidsBuilder_ == null) {
targetStorageUuidsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProtoOrBuilder>(
getTargetStorageUuids(),
getParentForChildren(),
isClean());
targetStorageUuids_ = null;
}
return targetStorageUuidsBuilder_;
}
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto targetStorageTypes_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProtoOrBuilder> targetStorageTypesBuilder_;
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
* @return Whether the targetStorageTypes field is set.
*/
public boolean hasTargetStorageTypes() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
* @return The targetStorageTypes.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto getTargetStorageTypes() {
if (targetStorageTypesBuilder_ == null) {
return targetStorageTypes_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.getDefaultInstance() : targetStorageTypes_;
} else {
return targetStorageTypesBuilder_.getMessage();
}
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
public Builder setTargetStorageTypes(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto value) {
if (targetStorageTypesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
targetStorageTypes_ = value;
} else {
targetStorageTypesBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
public Builder setTargetStorageTypes(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.Builder builderForValue) {
if (targetStorageTypesBuilder_ == null) {
targetStorageTypes_ = builderForValue.build();
} else {
targetStorageTypesBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
public Builder mergeTargetStorageTypes(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto value) {
if (targetStorageTypesBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0) &&
targetStorageTypes_ != null &&
targetStorageTypes_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.getDefaultInstance()) {
getTargetStorageTypesBuilder().mergeFrom(value);
} else {
targetStorageTypes_ = value;
}
} else {
targetStorageTypesBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
public Builder clearTargetStorageTypes() {
bitField0_ = (bitField0_ & ~0x00000010);
targetStorageTypes_ = null;
if (targetStorageTypesBuilder_ != null) {
targetStorageTypesBuilder_.dispose();
targetStorageTypesBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.Builder getTargetStorageTypesBuilder() {
bitField0_ |= 0x00000010;
onChanged();
return getTargetStorageTypesFieldBuilder().getBuilder();
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProtoOrBuilder getTargetStorageTypesOrBuilder() {
if (targetStorageTypesBuilder_ != null) {
return targetStorageTypesBuilder_.getMessageOrBuilder();
} else {
return targetStorageTypes_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.getDefaultInstance() : targetStorageTypes_;
}
}
/**
* required .hadoop.hdfs.StorageTypesProto targetStorageTypes = 5;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProtoOrBuilder>
getTargetStorageTypesFieldBuilder() {
if (targetStorageTypesBuilder_ == null) {
targetStorageTypesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProtoOrBuilder>(
getTargetStorageTypes(),
getParentForChildren(),
isClean());
targetStorageTypes_ = null;
}
return targetStorageTypesBuilder_;
}
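// Note (editorial): liveBlockIndices is an opaque packed byte array at this
// layer; in HDFS erasure coding each byte is presumed to identify the index
// of a surviving block in the striped block group. That reading is inferred
// from usage elsewhere in HDFS, not stated in this generated file.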
private org.apache.hadoop.thirdparty.protobuf.ByteString liveBlockIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* required bytes liveBlockIndices = 6;
* @return Whether the liveBlockIndices field is set.
*/
@java.lang.Override
public boolean hasLiveBlockIndices() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* required bytes liveBlockIndices = 6;
* @return The liveBlockIndices.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getLiveBlockIndices() {
return liveBlockIndices_;
}
/**
* required bytes liveBlockIndices = 6;
* @param value The liveBlockIndices to set.
* @return This builder for chaining.
*/
public Builder setLiveBlockIndices(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
liveBlockIndices_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
* required bytes liveBlockIndices = 6;
* @return This builder for chaining.
*/
public Builder clearLiveBlockIndices() {
bitField0_ = (bitField0_ & ~0x00000020);
liveBlockIndices_ = getDefaultInstance().getLiveBlockIndices();
onChanged();
return this;
}
private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto ecPolicy_;
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder> ecPolicyBuilder_;
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
* @return Whether the ecPolicy field is set.
*/
public boolean hasEcPolicy() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
* @return The ecPolicy.
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto getEcPolicy() {
if (ecPolicyBuilder_ == null) {
return ecPolicy_ == null ? org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
} else {
return ecPolicyBuilder_.getMessage();
}
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
public Builder setEcPolicy(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPolicyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ecPolicy_ = value;
} else {
ecPolicyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
public Builder setEcPolicy(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder builderForValue) {
if (ecPolicyBuilder_ == null) {
ecPolicy_ = builderForValue.build();
} else {
ecPolicyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
public Builder mergeEcPolicy(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto value) {
if (ecPolicyBuilder_ == null) {
if (((bitField0_ & 0x00000040) != 0) &&
ecPolicy_ != null &&
ecPolicy_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance()) {
getEcPolicyBuilder().mergeFrom(value);
} else {
ecPolicy_ = value;
}
} else {
ecPolicyBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
public Builder clearEcPolicy() {
bitField0_ = (bitField0_ & ~0x00000040);
ecPolicy_ = null;
if (ecPolicyBuilder_ != null) {
ecPolicyBuilder_.dispose();
ecPolicyBuilder_ = null;
}
onChanged();
return this;
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder getEcPolicyBuilder() {
bitField0_ |= 0x00000040;
onChanged();
return getEcPolicyFieldBuilder().getBuilder();
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder getEcPolicyOrBuilder() {
if (ecPolicyBuilder_ != null) {
return ecPolicyBuilder_.getMessageOrBuilder();
} else {
return ecPolicy_ == null ?
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.getDefaultInstance() : ecPolicy_;
}
}
/**
* required .hadoop.hdfs.ErasureCodingPolicyProto ecPolicy = 7;
*/
private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>
getEcPolicyFieldBuilder() {
if (ecPolicyBuilder_ == null) {
ecPolicyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProtoOrBuilder>(
getEcPolicy(),
getParentForChildren(),
isClean());
ecPolicy_ = null;
}
return ecPolicyBuilder_;
}
private org.apache.hadoop.thirdparty.protobuf.ByteString excludeReconstructedIndices_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
/**
* optional bytes excludeReconstructedIndices = 8;
* @return Whether the excludeReconstructedIndices field is set.
*/
@java.lang.Override
public boolean hasExcludeReconstructedIndices() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* optional bytes excludeReconstructedIndices = 8;
* @return The excludeReconstructedIndices.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString getExcludeReconstructedIndices() {
return excludeReconstructedIndices_;
}
/**
* optional bytes excludeReconstructedIndices = 8;
* @param value The excludeReconstructedIndices to set.
* @return This builder for chaining.
*/
public Builder setExcludeReconstructedIndices(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
excludeReconstructedIndices_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* optional bytes excludeReconstructedIndices = 8;
* @return This builder for chaining.
*/
public Builder clearExcludeReconstructedIndices() {
bitField0_ = (bitField0_ & ~0x00000080);
excludeReconstructedIndices_ = getDefaultInstance().getExcludeReconstructedIndices();
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.BlockECReconstructionInfoProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.BlockECReconstructionInfoProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<BlockECReconstructionInfoProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<BlockECReconstructionInfoProto>() {
@java.lang.Override
public BlockECReconstructionInfoProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<BlockECReconstructionInfoProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<BlockECReconstructionInfoProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.BlockECReconstructionInfoProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
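// Illustrative sketch (editorial addition, not part of the generated file):
// one plausible way to assemble a BlockECReconstructionInfoProto via its
// Builder. The helper name and arguments are hypothetical stand-ins for real
// cluster state.
private static BlockECReconstructionInfoProto buildReconstructionInfoExample(
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto block,
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto sources,
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto targets,
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto targetUuids,
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypesProto targetTypes,
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto policy) {
// All seven required fields must be set, or build() fails with an
// uninitialized-message error (see isInitialized() in the Builder above).
return BlockECReconstructionInfoProto.newBuilder()
.setBlock(block)
.setSourceDnInfos(sources)
.setTargetDnInfos(targets)
.setTargetStorageUuids(targetUuids)
.setTargetStorageTypes(targetTypes)
.setLiveBlockIndices(
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFrom(
new byte[] {0, 1, 3})) // hypothetical surviving block indices
.setEcPolicy(policy)
.build();
}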
public interface CodecProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.hdfs.CodecProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string codec = 1;
* @return Whether the codec field is set.
*/
boolean hasCodec();
/**
* required string codec = 1;
* @return The codec.
*/
java.lang.String getCodec();
/**
* required string codec = 1;
* @return The bytes for codec.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getCodecBytes();
/**
* required string coders = 2;
* @return Whether the coders field is set.
*/
boolean hasCoders();
/**
* required string coders = 2;
* @return The coders.
*/
java.lang.String getCoders();
/**
* required string coders = 2;
* @return The bytes for coders.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getCodersBytes();
}
/**
* Codec and its corresponding coders
*
* Protobuf type {@code hadoop.hdfs.CodecProto}
*/
public static final class CodecProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.hdfs.CodecProto)
CodecProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use CodecProto.newBuilder() to construct.
private CodecProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CodecProto() {
codec_ = "";
coders_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CodecProto();
}
@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_CodecProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_CodecProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder.class);
}
private int bitField0_;
public static final int CODEC_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object codec_ = "";
/**
* required string codec = 1;
* @return Whether the codec field is set.
*/
@java.lang.Override
public boolean hasCodec() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string codec = 1;
* @return The codec.
*/
@java.lang.Override
public java.lang.String getCodec() {
java.lang.Object ref = codec_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
codec_ = s;
}
return s;
}
}
/**
* required string codec = 1;
* @return The bytes for codec.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCodecBytes() {
java.lang.Object ref = codec_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
codec_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
public static final int CODERS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object coders_ = "";
/**
* required string coders = 2;
* @return Whether the coders field is set.
*/
@java.lang.Override
public boolean hasCoders() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string coders = 2;
* @return The coders.
*/
@java.lang.Override
public java.lang.String getCoders() {
java.lang.Object ref = coders_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
coders_ = s;
}
return s;
}
}
/**
* required string coders = 2;
* @return The bytes for coders.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCodersBytes() {
java.lang.Object ref = coders_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
coders_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasCodec()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasCoders()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, codec_);
}
if (((bitField0_ & 0x00000002) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, coders_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, codec_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, coders_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto other = (org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto) obj;
if (hasCodec() != other.hasCodec()) return false;
if (hasCodec()) {
if (!getCodec()
.equals(other.getCodec())) return false;
}
if (hasCoders() != other.hasCoders()) return false;
if (hasCoders()) {
if (!getCoders()
.equals(other.getCoders())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCodec()) {
hash = (37 * hash) + CODEC_FIELD_NUMBER;
hash = (53 * hash) + getCodec().hashCode();
}
if (hasCoders()) {
hash = (37 * hash) + CODERS_FIELD_NUMBER;
hash = (53 * hash) + getCoders().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
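// Note (editorial): the constants above follow the standard protobuf-java
// hashCode recipe -- seed 41, descriptor mixed in with 19, field numbers
// with 37, field values with 53, unknown fields with 29 -- and the result
// is memoized, which is safe because the message is immutable.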
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Codec and its corresponding coders
*
* Protobuf type {@code hadoop.hdfs.CodecProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.hdfs.CodecProto)
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_CodecProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_CodecProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.class, org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
codec_ = "";
coders_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.internal_static_hadoop_hdfs_CodecProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto build() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto result = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.codec_ = codec_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.coders_ = coders_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto.getDefaultInstance()) return this;
if (other.hasCodec()) {
codec_ = other.codec_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasCoders()) {
coders_ = other.coders_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasCodec()) {
return false;
}
if (!hasCoders()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
codec_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
coders_ = input.readBytes();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object codec_ = "";
/**
* required string codec = 1;
* @return Whether the codec field is set.
*/
public boolean hasCodec() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string codec = 1;
* @return The codec.
*/
public java.lang.String getCodec() {
java.lang.Object ref = codec_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
codec_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string codec = 1;
* @return The bytes for codec.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCodecBytes() {
java.lang.Object ref = codec_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
codec_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string codec = 1;
* @param value The codec to set.
* @return This builder for chaining.
*/
public Builder setCodec(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
codec_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string codec = 1;
* @return This builder for chaining.
*/
public Builder clearCodec() {
codec_ = getDefaultInstance().getCodec();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string codec = 1;
* @param value The bytes for codec to set.
* @return This builder for chaining.
*/
public Builder setCodecBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
codec_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object coders_ = "";
/**
* required string coders = 2;
* @return Whether the coders field is set.
*/
public boolean hasCoders() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* required string coders = 2;
* @return The coders.
*/
public java.lang.String getCoders() {
java.lang.Object ref = coders_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
coders_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string coders = 2;
* @return The bytes for coders.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getCodersBytes() {
java.lang.Object ref = coders_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
coders_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string coders = 2;
* @param value The coders to set.
* @return This builder for chaining.
*/
public Builder setCoders(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
coders_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* required string coders = 2;
* @return This builder for chaining.
*/
public Builder clearCoders() {
coders_ = getDefaultInstance().getCoders();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* required string coders = 2;
* @param value The bytes for coders to set.
* @return This builder for chaining.
*/
public Builder setCodersBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
coders_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.CodecProto)
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.CodecProto)
private static final org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto();
}
public static org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<CodecProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<CodecProto>() {
@java.lang.Override
public CodecProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<CodecProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<CodecProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
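// Illustrative sketch (editorial addition, not part of the generated file):
// building a CodecProto and round-tripping it through the wire format via
// parseFrom(byte[]). The codec and coder names are hypothetical placeholders.
private static CodecProto codecRoundTripExample()
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
CodecProto codec = CodecProto.newBuilder()
.setCodec("rs") // hypothetical codec name
.setCoders("rs_native,rs_java") // hypothetical coder list
.build(); // both required fields are set, so build() succeeds
byte[] wire = codec.toByteArray(); // serialize to the protobuf wire format
return CodecProto.parseFrom(wire); // parse back; equals(codec) holds
}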
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_CodecProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_hdfs_CodecProto_fieldAccessorTable;
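// These per-message Descriptor / FieldAccessorTable pairs are populated by
// the static initializer below and back the reflective API of each generated
// message. A hypothetical lookup sketch (illustrative only):
//
//   org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor f =
//       internal_static_hadoop_hdfs_CodecProto_descriptor
//           .findFieldByName("codec"); // field name as written in the .proto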
public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\023erasurecoding.proto\022\013hadoop.hdfs\032\nhdfs" +
".proto\"G\n\"SetErasureCodingPolicyRequestP" +
"roto\022\013\n\003src\030\001 \002(\t\022\024\n\014ecPolicyName\030\002 \001(\t\"" +
"%\n#SetErasureCodingPolicyResponseProto\"&" +
"\n$GetErasureCodingPoliciesRequestProto\"b" +
"\n%GetErasureCodingPoliciesResponseProto\022" +
"9\n\necPolicies\030\001 \003(\0132%.hadoop.hdfs.Erasur" +
"eCodingPolicyProto\"$\n\"GetErasureCodingCo" +
"decsRequestProto\"M\n#GetErasureCodingCode" +
"csResponseProto\022&\n\005codec\030\001 \003(\0132\027.hadoop." +
"hdfs.CodecProto\"1\n\"GetErasureCodingPolic" +
"yRequestProto\022\013\n\003src\030\001 \002(\t\"^\n#GetErasure" +
"CodingPolicyResponseProto\0227\n\010ecPolicy\030\001 " +
"\001(\0132%.hadoop.hdfs.ErasureCodingPolicyPro" +
"to\"a\n$AddErasureCodingPoliciesRequestPro" +
"to\0229\n\necPolicies\030\001 \003(\0132%.hadoop.hdfs.Era" +
"sureCodingPolicyProto\"l\n%AddErasureCodin" +
"gPoliciesResponseProto\022C\n\tresponses\030\001 \003(" +
"\01320.hadoop.hdfs.AddErasureCodingPolicyRe" +
"sponseProto\"=\n%RemoveErasureCodingPolicy" +
"RequestProto\022\024\n\014ecPolicyName\030\001 \002(\t\"(\n&Re" +
"moveErasureCodingPolicyResponseProto\"=\n%" +
"EnableErasureCodingPolicyRequestProto\022\024\n" +
"\014ecPolicyName\030\001 \002(\t\"(\n&EnableErasureCodi" +
"ngPolicyResponseProto\">\n&DisableErasureC" +
"odingPolicyRequestProto\022\024\n\014ecPolicyName\030" +
"\001 \002(\t\")\n\'DisableErasureCodingPolicyRespo" +
"nseProto\"3\n$UnsetErasureCodingPolicyRequ" +
"estProto\022\013\n\003src\030\001 \002(\t\"\'\n%UnsetErasureCod" +
"ingPolicyResponseProto\">\n*GetECTopologyR" +
"esultForPoliciesRequestProto\022\020\n\010policies" +
"\030\001 \003(\t\"k\n+GetECTopologyResultForPolicies" +
"ResponseProto\022<\n\010response\030\001 \002(\0132*.hadoop" +
".hdfs.ECTopologyVerifierResultProto\"\260\003\n\036" +
"BlockECReconstructionInfoProto\022.\n\005block\030" +
"\001 \002(\0132\037.hadoop.hdfs.ExtendedBlockProto\0226" +
"\n\rsourceDnInfos\030\002 \002(\0132\037.hadoop.hdfs.Data" +
"nodeInfosProto\0226\n\rtargetDnInfos\030\003 \002(\0132\037." +
"hadoop.hdfs.DatanodeInfosProto\022:\n\022target" +
"StorageUuids\030\004 \002(\0132\036.hadoop.hdfs.Storage" +
"UuidsProto\022:\n\022targetStorageTypes\030\005 \002(\0132\036" +
".hadoop.hdfs.StorageTypesProto\022\030\n\020liveBl" +
"ockIndices\030\006 \002(\014\0227\n\010ecPolicy\030\007 \002(\0132%.had" +
"oop.hdfs.ErasureCodingPolicyProto\022#\n\033exc" +
"ludeReconstructedIndices\030\010 \001(\014\"+\n\nCodecP" +
"roto\022\r\n\005codec\030\001 \002(\t\022\016\n\006coders\030\002 \002(\tB?\n%o" +
"rg.apache.hadoop.hdfs.protocol.protoB\023Er" +
"asureCodingProtos\240\001\001"
};
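// descriptorData above is the serialized FileDescriptorProto for
// erasurecoding.proto, emitted by protoc as octal-escaped Java string
// literals and concatenated at class-load time; internalBuildGeneratedFileFrom
// decodes it against the descriptors of its dependencies (here hdfs.proto).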
descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.getDescriptor(),
});
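// The wiring below follows one fixed pattern per message: fetch the message
// descriptor by its declaration index in erasurecoding.proto, then build a
// FieldAccessorTable whose String[] lists the CamelCase field names used to
// locate the generated accessor methods (e.g. "Src" -> getSrc()/setSrc()).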
internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_SetErasureCodingPolicyRequestProto_descriptor,
new java.lang.String[] { "Src", "EcPolicyName", });
internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_SetErasureCodingPolicyResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetErasureCodingPoliciesRequestProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetErasureCodingPoliciesResponseProto_descriptor,
new java.lang.String[] { "EcPolicies", });
internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetErasureCodingCodecsRequestProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetErasureCodingCodecsResponseProto_descriptor,
new java.lang.String[] { "Codec", });
internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetErasureCodingPolicyRequestProto_descriptor,
new java.lang.String[] { "Src", });
internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetErasureCodingPolicyResponseProto_descriptor,
new java.lang.String[] { "EcPolicy", });
internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_AddErasureCodingPoliciesRequestProto_descriptor,
new java.lang.String[] { "EcPolicies", });
internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_AddErasureCodingPoliciesResponseProto_descriptor,
new java.lang.String[] { "Responses", });
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyRequestProto_descriptor,
new java.lang.String[] { "EcPolicyName", });
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_RemoveErasureCodingPolicyResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_EnableErasureCodingPolicyRequestProto_descriptor,
new java.lang.String[] { "EcPolicyName", });
internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_EnableErasureCodingPolicyResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_DisableErasureCodingPolicyRequestProto_descriptor,
new java.lang.String[] { "EcPolicyName", });
internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_DisableErasureCodingPolicyResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyRequestProto_descriptor,
new java.lang.String[] { "Src", });
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_UnsetErasureCodingPolicyResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesRequestProto_descriptor,
new java.lang.String[] { "Policies", });
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_GetECTopologyResultForPoliciesResponseProto_descriptor,
new java.lang.String[] { "Response", });
internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_BlockECReconstructionInfoProto_descriptor,
new java.lang.String[] { "Block", "SourceDnInfos", "TargetDnInfos", "TargetStorageUuids", "TargetStorageTypes", "LiveBlockIndices", "EcPolicy", "ExcludeReconstructedIndices", });
internal_static_hadoop_hdfs_CodecProto_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_hadoop_hdfs_CodecProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_hdfs_CodecProto_descriptor,
new java.lang.String[] { "Codec", "Coders", });
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
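// A hypothetical inspection snippet (illustrative, not part of the generated
// file): enumerates every message type registered by erasurecoding.proto.
//
//   for (org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor d :
//       org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos
//           .getDescriptor().getMessageTypes()) {
//     System.out.println(d.getFullName()); // e.g. hadoop.hdfs.CodecProto
//   }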