// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: xattr.proto
package org.apache.hadoop.hdfs.protocol.proto;
public final class XAttrProtos {
private XAttrProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf enum {@code hadoop.hdfs.XAttrSetFlagProto}
*/
public enum XAttrSetFlagProto
implements com.google.protobuf.ProtocolMessageEnum {
/**
* XATTR_CREATE = 1;
*/
XATTR_CREATE(0, 1),
/**
* XATTR_REPLACE = 2;
*/
XATTR_REPLACE(1, 2),
;
/**
* XATTR_CREATE = 1;
*/
public static final int XATTR_CREATE_VALUE = 1;
/**
* XATTR_REPLACE = 2;
*/
public static final int XATTR_REPLACE_VALUE = 2;
public final int getNumber() { return value; }
public static XAttrSetFlagProto valueOf(int value) {
switch (value) {
case 1: return XATTR_CREATE;
case 2: return XATTR_REPLACE;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<XAttrSetFlagProto>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<XAttrSetFlagProto>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<XAttrSetFlagProto>() {
public XAttrSetFlagProto findValueByNumber(int number) {
return XAttrSetFlagProto.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.getDescriptor().getEnumTypes().get(0);
}
private static final XAttrSetFlagProto[] VALUES = values();
public static XAttrSetFlagProto valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int index;
private final int value;
private XAttrSetFlagProto(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.hdfs.XAttrSetFlagProto)
}
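// Illustrative sketch (not part of the generated code): the XAttrSetFlagProto
// numbers are bit flags, so callers populating the uint32 `flag` field of
// SetXAttrRequestProto (declared further below, see "bits set using
// XAttrSetFlagProto") can OR the *_VALUE constants together, e.g.:
//   int flags = XAttrSetFlagProto.XATTR_CREATE_VALUE
//       | XAttrSetFlagProto.XATTR_REPLACE_VALUE; // 1 | 2 == 3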
public interface XAttrProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
boolean hasNamespace();
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto getNamespace();
// required string name = 2;
/**
* required string name = 2;
*/
boolean hasName();
/**
* required string name = 2;
*/
java.lang.String getName();
/**
* required string name = 2;
*/
com.google.protobuf.ByteString
getNameBytes();
// optional bytes value = 3;
/**
* optional bytes value = 3;
*/
boolean hasValue();
/**
* optional bytes value = 3;
*/
com.google.protobuf.ByteString getValue();
}
/**
* Protobuf type {@code hadoop.hdfs.XAttrProto}
*/
public static final class XAttrProto extends
com.google.protobuf.GeneratedMessage
implements XAttrProtoOrBuilder {
// Use XAttrProto.newBuilder() to construct.
private XAttrProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private XAttrProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final XAttrProto defaultInstance;
public static XAttrProto getDefaultInstance() {
return defaultInstance;
}
public XAttrProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private XAttrProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto value = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
namespace_ = value;
}
break;
}
case 18: {
bitField0_ |= 0x00000002;
name_ = input.readBytes();
break;
}
case 26: {
bitField0_ |= 0x00000004;
value_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_XAttrProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_XAttrProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder.class);
}
public static com.google.protobuf.Parser<XAttrProto> PARSER =
new com.google.protobuf.AbstractParser<XAttrProto>() {
public XAttrProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new XAttrProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<XAttrProto> getParserForType() {
return PARSER;
}
/**
* Protobuf enum {@code hadoop.hdfs.XAttrProto.XAttrNamespaceProto}
*/
public enum XAttrNamespaceProto
implements com.google.protobuf.ProtocolMessageEnum {
/**
* USER = 0;
*/
USER(0, 0),
/**
* TRUSTED = 1;
*/
TRUSTED(1, 1),
/**
* SECURITY = 2;
*/
SECURITY(2, 2),
/**
* SYSTEM = 3;
*/
SYSTEM(3, 3),
/**
* RAW = 4;
*/
RAW(4, 4),
;
/**
* USER = 0;
*/
public static final int USER_VALUE = 0;
/**
* TRUSTED = 1;
*/
public static final int TRUSTED_VALUE = 1;
/**
* SECURITY = 2;
*/
public static final int SECURITY_VALUE = 2;
/**
* SYSTEM = 3;
*/
public static final int SYSTEM_VALUE = 3;
/**
* RAW = 4;
*/
public static final int RAW_VALUE = 4;
public final int getNumber() { return value; }
public static XAttrNamespaceProto valueOf(int value) {
switch (value) {
case 0: return USER;
case 1: return TRUSTED;
case 2: return SECURITY;
case 3: return SYSTEM;
case 4: return RAW;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<XAttrNamespaceProto>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<XAttrNamespaceProto>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<XAttrNamespaceProto>() {
public XAttrNamespaceProto findValueByNumber(int number) {
return XAttrNamespaceProto.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDescriptor().getEnumTypes().get(0);
}
private static final XAttrNamespaceProto[] VALUES = values();
public static XAttrNamespaceProto valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int index;
private final int value;
private XAttrNamespaceProto(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:hadoop.hdfs.XAttrProto.XAttrNamespaceProto)
}
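// Illustrative sketch (not part of the generated code): numeric wire values
// can be mapped back to XAttrNamespaceProto with valueOf(int); numbers outside
// the declared range yield null rather than throwing, e.g.:
//   XAttrNamespaceProto ns = XAttrNamespaceProto.valueOf(2);      // SECURITY
//   XAttrNamespaceProto unknown = XAttrNamespaceProto.valueOf(99); // null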
private int bitField0_;
// required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
public static final int NAMESPACE_FIELD_NUMBER = 1;
private org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto namespace_;
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
public boolean hasNamespace() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto getNamespace() {
return namespace_;
}
// required string name = 2;
public static final int NAME_FIELD_NUMBER = 2;
private java.lang.Object name_;
/**
* required string name = 2;
*/
public boolean hasName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* required string name = 2;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
}
}
/**
* required string name = 2;
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional bytes value = 3;
public static final int VALUE_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString value_;
/**
* optional bytes value = 3;
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional bytes value = 3;
*/
public com.google.protobuf.ByteString getValue() {
return value_;
}
private void initFields() {
namespace_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto.USER;
name_ = "";
value_ = com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasNamespace()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, namespace_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, value_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, namespace_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, value_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto) obj;
boolean result = true;
result = result && (hasNamespace() == other.hasNamespace());
if (hasNamespace()) {
result = result &&
(getNamespace() == other.getNamespace());
}
result = result && (hasName() == other.hasName());
if (hasName()) {
result = result && getName()
.equals(other.getName());
}
result = result && (hasValue() == other.hasValue());
if (hasValue()) {
result = result && getValue()
.equals(other.getValue());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasNamespace()) {
hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getNamespace());
}
if (hasName()) {
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
}
if (hasValue()) {
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.XAttrProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_XAttrProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_XAttrProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
namespace_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto.USER;
bitField0_ = (bitField0_ & ~0x00000001);
name_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
value_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_XAttrProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.namespace_ = namespace_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.name_ = name_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.value_ = value_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance()) return this;
if (other.hasNamespace()) {
setNamespace(other.getNamespace());
}
if (other.hasName()) {
bitField0_ |= 0x00000002;
name_ = other.name_;
onChanged();
}
if (other.hasValue()) {
setValue(other.getValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasNamespace()) {
return false;
}
if (!hasName()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
private org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto namespace_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto.USER;
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
public boolean hasNamespace() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto getNamespace() {
return namespace_;
}
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
public Builder setNamespace(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
namespace_ = value;
onChanged();
return this;
}
/**
* required .hadoop.hdfs.XAttrProto.XAttrNamespaceProto namespace = 1;
*/
public Builder clearNamespace() {
bitField0_ = (bitField0_ & ~0x00000001);
namespace_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto.USER;
onChanged();
return this;
}
// required string name = 2;
private java.lang.Object name_ = "";
/**
* required string name = 2;
*/
public boolean hasName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* required string name = 2;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string name = 2;
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string name = 2;
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
name_ = value;
onChanged();
return this;
}
/**
* required string name = 2;
*/
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000002);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* required string name = 2;
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
name_ = value;
onChanged();
return this;
}
// optional bytes value = 3;
private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes value = 3;
*/
public boolean hasValue() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional bytes value = 3;
*/
public com.google.protobuf.ByteString getValue() {
return value_;
}
/**
* optional bytes value = 3;
*/
public Builder setValue(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
value_ = value;
onChanged();
return this;
}
/**
* optional bytes value = 3;
*/
public Builder clearValue() {
bitField0_ = (bitField0_ & ~0x00000004);
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.XAttrProto)
}
static {
defaultInstance = new XAttrProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.XAttrProto)
}
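// Illustrative usage sketch (not part of the generated code); the attribute
// name and value below are hypothetical examples:
//   XAttrProto xattr = XAttrProto.newBuilder()
//       .setNamespace(XAttrProto.XAttrNamespaceProto.USER)
//       .setName("user.myAttr")                                      // required
//       .setValue(com.google.protobuf.ByteString.copyFromUtf8("v"))  // optional
//       .build(); // throws if the required namespace/name fields are unset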
public interface SetXAttrRequestProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string src = 1;
/**
* required string src = 1;
*/
boolean hasSrc();
/**
* required string src = 1;
*/
java.lang.String getSrc();
/**
* required string src = 1;
*/
com.google.protobuf.ByteString
getSrcBytes();
// optional .hadoop.hdfs.XAttrProto xAttr = 2;
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
boolean hasXAttr();
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttr();
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrOrBuilder();
// optional uint32 flag = 3;
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
boolean hasFlag();
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
int getFlag();
}
/**
* Protobuf type {@code hadoop.hdfs.SetXAttrRequestProto}
*/
public static final class SetXAttrRequestProto extends
com.google.protobuf.GeneratedMessage
implements SetXAttrRequestProtoOrBuilder {
// Use SetXAttrRequestProto.newBuilder() to construct.
private SetXAttrRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SetXAttrRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SetXAttrRequestProto defaultInstance;
public static SetXAttrRequestProto getDefaultInstance() {
return defaultInstance;
}
public SetXAttrRequestProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SetXAttrRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
src_ = input.readBytes();
break;
}
case 18: {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = xAttr_.toBuilder();
}
xAttr_ = input.readMessage(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(xAttr_);
xAttr_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
case 24: {
bitField0_ |= 0x00000004;
flag_ = input.readUInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto.Builder.class);
}
public static com.google.protobuf.Parser<SetXAttrRequestProto> PARSER =
new com.google.protobuf.AbstractParser<SetXAttrRequestProto>() {
public SetXAttrRequestProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SetXAttrRequestProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SetXAttrRequestProto> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string src = 1;
public static final int SRC_FIELD_NUMBER = 1;
private java.lang.Object src_;
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .hadoop.hdfs.XAttrProto xAttr = 2;
public static final int XATTR_FIELD_NUMBER = 2;
private org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto xAttr_;
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public boolean hasXAttr() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttr() {
return xAttr_;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrOrBuilder() {
return xAttr_;
}
// optional uint32 flag = 3;
public static final int FLAG_FIELD_NUMBER = 3;
private int flag_;
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
public boolean hasFlag() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
public int getFlag() {
return flag_;
}
private void initFields() {
src_ = "";
xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
flag_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasSrc()) {
memoizedIsInitialized = 0;
return false;
}
if (hasXAttr()) {
if (!getXAttr().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getSrcBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, xAttr_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt32(3, flag_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getSrcBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, xAttr_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(3, flag_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto) obj;
boolean result = true;
result = result && (hasSrc() == other.hasSrc());
if (hasSrc()) {
result = result && getSrc()
.equals(other.getSrc());
}
result = result && (hasXAttr() == other.hasXAttr());
if (hasXAttr()) {
result = result && getXAttr()
.equals(other.getXAttr());
}
result = result && (hasFlag() == other.hasFlag());
if (hasFlag()) {
result = result && (getFlag()
== other.getFlag());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSrc()) {
hash = (37 * hash) + SRC_FIELD_NUMBER;
hash = (53 * hash) + getSrc().hashCode();
}
if (hasXAttr()) {
hash = (37 * hash) + XATTR_FIELD_NUMBER;
hash = (53 * hash) + getXAttr().hashCode();
}
if (hasFlag()) {
hash = (37 * hash) + FLAG_FIELD_NUMBER;
hash = (53 * hash) + getFlag();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.SetXAttrRequestProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getXAttrFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
src_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (xAttrBuilder_ == null) {
xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
} else {
xAttrBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
flag_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrRequestProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.src_ = src_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (xAttrBuilder_ == null) {
result.xAttr_ = xAttr_;
} else {
result.xAttr_ = xAttrBuilder_.build();
}
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.flag_ = flag_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto.getDefaultInstance()) return this;
if (other.hasSrc()) {
bitField0_ |= 0x00000001;
src_ = other.src_;
onChanged();
}
if (other.hasXAttr()) {
mergeXAttr(other.getXAttr());
}
if (other.hasFlag()) {
setFlag(other.getFlag());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasSrc()) {
return false;
}
if (hasXAttr()) {
if (!getXAttr().isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrRequestProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string src = 1;
private java.lang.Object src_ = "";
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
src_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string src = 1;
*/
public Builder setSrc(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder clearSrc() {
bitField0_ = (bitField0_ & ~0x00000001);
src_ = getDefaultInstance().getSrc();
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder setSrcBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
// optional .hadoop.hdfs.XAttrProto xAttr = 2;
private org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder> xAttrBuilder_;
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public boolean hasXAttr() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttr() {
if (xAttrBuilder_ == null) {
return xAttr_;
} else {
return xAttrBuilder_.getMessage();
}
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder setXAttr(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
xAttr_ = value;
onChanged();
} else {
xAttrBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder setXAttr(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrBuilder_ == null) {
xAttr_ = builderForValue.build();
onChanged();
} else {
xAttrBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder mergeXAttr(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
xAttr_ != org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance()) {
xAttr_ =
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.newBuilder(xAttr_).mergeFrom(value).buildPartial();
} else {
xAttr_ = value;
}
onChanged();
} else {
xAttrBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder clearXAttr() {
if (xAttrBuilder_ == null) {
xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
onChanged();
} else {
xAttrBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder getXAttrBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getXAttrFieldBuilder().getBuilder();
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrOrBuilder() {
if (xAttrBuilder_ != null) {
return xAttrBuilder_.getMessageOrBuilder();
} else {
return xAttr_;
}
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrFieldBuilder() {
if (xAttrBuilder_ == null) {
xAttrBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>(
xAttr_,
getParentForChildren(),
isClean());
xAttr_ = null;
}
return xAttrBuilder_;
}
// optional uint32 flag = 3;
private int flag_ ;
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
public boolean hasFlag() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
public int getFlag() {
return flag_;
}
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
public Builder setFlag(int value) {
bitField0_ |= 0x00000004;
flag_ = value;
onChanged();
return this;
}
/**
* optional uint32 flag = 3;
*
*
* bits set using XAttrSetFlagProto
*
*/
public Builder clearFlag() {
bitField0_ = (bitField0_ & ~0x00000004);
flag_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.SetXAttrRequestProto)
}
static {
defaultInstance = new SetXAttrRequestProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.SetXAttrRequestProto)
}
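// Illustrative usage sketch (not part of the generated code); the path and the
// xattr variable below are hypothetical:
//   SetXAttrRequestProto request = SetXAttrRequestProto.newBuilder()
//       .setSrc("/some/path")        // required target file or directory
//       .setXAttr(xattr)             // an XAttrProto built as shown above
//       .setFlag(XAttrSetFlagProto.XATTR_CREATE_VALUE)
//       .build();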
public interface SetXAttrResponseProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code hadoop.hdfs.SetXAttrResponseProto}
*/
public static final class SetXAttrResponseProto extends
com.google.protobuf.GeneratedMessage
implements SetXAttrResponseProtoOrBuilder {
// Use SetXAttrResponseProto.newBuilder() to construct.
private SetXAttrResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SetXAttrResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SetXAttrResponseProto defaultInstance;
public static SetXAttrResponseProto getDefaultInstance() {
return defaultInstance;
}
public SetXAttrResponseProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SetXAttrResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto.Builder.class);
}
public static com.google.protobuf.Parser<SetXAttrResponseProto> PARSER =
new com.google.protobuf.AbstractParser<SetXAttrResponseProto>() {
public SetXAttrResponseProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SetXAttrResponseProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SetXAttrResponseProto> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.SetXAttrResponseProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_SetXAttrResponseProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.SetXAttrResponseProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.SetXAttrResponseProto)
}
static {
defaultInstance = new SetXAttrResponseProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.SetXAttrResponseProto)
}
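// --- Illustrative usage sketch (not part of the generated file) ---
// SetXAttrResponseProto declares no fields, so a caller usually just round-trips the
// default instance over a stream. The stream variables below are hypothetical example
// values; writeDelimitedTo/parseDelimitedFrom are the standard protobuf-java message calls,
// with parseDelimitedFrom also declared on this class above.
//
//   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
//   SetXAttrResponseProto.getDefaultInstance().writeDelimitedTo(out);   // length-prefixed write
//   java.io.ByteArrayInputStream in =
//       new java.io.ByteArrayInputStream(out.toByteArray());
//   SetXAttrResponseProto reply = SetXAttrResponseProto.parseDelimitedFrom(in);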
public interface GetXAttrsRequestProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string src = 1;
/**
* required string src = 1;
*/
boolean hasSrc();
/**
* required string src = 1;
*/
java.lang.String getSrc();
/**
* required string src = 1;
*/
com.google.protobuf.ByteString
getSrcBytes();
// repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>
getXAttrsList();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index);
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
int getXAttrsCount();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.hdfs.GetXAttrsRequestProto}
*/
public static final class GetXAttrsRequestProto extends
com.google.protobuf.GeneratedMessage
implements GetXAttrsRequestProtoOrBuilder {
// Use GetXAttrsRequestProto.newBuilder() to construct.
private GetXAttrsRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private GetXAttrsRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final GetXAttrsRequestProto defaultInstance;
public static GetXAttrsRequestProto getDefaultInstance() {
return defaultInstance;
}
public GetXAttrsRequestProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private GetXAttrsRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
src_ = input.readBytes();
break;
}
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>();
mutable_bitField0_ |= 0x00000002;
}
xAttrs_.add(input.readMessage(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto.Builder.class);
}
public static com.google.protobuf.Parser<GetXAttrsRequestProto> PARSER =
new com.google.protobuf.AbstractParser<GetXAttrsRequestProto>() {
public GetXAttrsRequestProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetXAttrsRequestProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<GetXAttrsRequestProto> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string src = 1;
public static final int SRC_FIELD_NUMBER = 1;
private java.lang.Object src_;
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
public static final int XATTRS_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> xAttrs_;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> getXAttrsList() {
return xAttrs_;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList() {
return xAttrs_;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public int getXAttrsCount() {
return xAttrs_.size();
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index) {
return xAttrs_.get(index);
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index) {
return xAttrs_.get(index);
}
private void initFields() {
src_ = "";
xAttrs_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasSrc()) {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getXAttrsCount(); i++) {
if (!getXAttrs(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getSrcBytes());
}
for (int i = 0; i < xAttrs_.size(); i++) {
output.writeMessage(2, xAttrs_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getSrcBytes());
}
for (int i = 0; i < xAttrs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, xAttrs_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto) obj;
boolean result = true;
result = result && (hasSrc() == other.hasSrc());
if (hasSrc()) {
result = result && getSrc()
.equals(other.getSrc());
}
result = result && getXAttrsList()
.equals(other.getXAttrsList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSrc()) {
hash = (37 * hash) + SRC_FIELD_NUMBER;
hash = (53 * hash) + getSrc().hashCode();
}
if (getXAttrsCount() > 0) {
hash = (37 * hash) + XATTRS_FIELD_NUMBER;
hash = (53 * hash) + getXAttrsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetXAttrsRequestProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getXAttrsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
src_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (xAttrsBuilder_ == null) {
xAttrs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
xAttrsBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsRequestProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.src_ = src_;
if (xAttrsBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.xAttrs_ = xAttrs_;
} else {
result.xAttrs_ = xAttrsBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto.getDefaultInstance()) return this;
if (other.hasSrc()) {
bitField0_ |= 0x00000001;
src_ = other.src_;
onChanged();
}
if (xAttrsBuilder_ == null) {
if (!other.xAttrs_.isEmpty()) {
if (xAttrs_.isEmpty()) {
xAttrs_ = other.xAttrs_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureXAttrsIsMutable();
xAttrs_.addAll(other.xAttrs_);
}
onChanged();
}
} else {
if (!other.xAttrs_.isEmpty()) {
if (xAttrsBuilder_.isEmpty()) {
xAttrsBuilder_.dispose();
xAttrsBuilder_ = null;
xAttrs_ = other.xAttrs_;
bitField0_ = (bitField0_ & ~0x00000002);
xAttrsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getXAttrsFieldBuilder() : null;
} else {
xAttrsBuilder_.addAllMessages(other.xAttrs_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasSrc()) {
return false;
}
for (int i = 0; i < getXAttrsCount(); i++) {
if (!getXAttrs(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string src = 1;
private java.lang.Object src_ = "";
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
src_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string src = 1;
*/
public Builder setSrc(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder clearSrc() {
bitField0_ = (bitField0_ & ~0x00000001);
src_ = getDefaultInstance().getSrc();
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder setSrcBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
// repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> xAttrs_ =
java.util.Collections.emptyList();
private void ensureXAttrsIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>(xAttrs_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder> xAttrsBuilder_;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> getXAttrsList() {
if (xAttrsBuilder_ == null) {
return java.util.Collections.unmodifiableList(xAttrs_);
} else {
return xAttrsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public int getXAttrsCount() {
if (xAttrsBuilder_ == null) {
return xAttrs_.size();
} else {
return xAttrsBuilder_.getCount();
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index) {
if (xAttrsBuilder_ == null) {
return xAttrs_.get(index);
} else {
return xAttrsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder setXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.set(index, value);
onChanged();
} else {
xAttrsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder setXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.set(index, builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder addXAttrs(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.add(value);
onChanged();
} else {
xAttrsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder addXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.add(index, value);
onChanged();
} else {
xAttrsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder addXAttrs(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.add(builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder addXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.add(index, builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder addAllXAttrs(
java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> values) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
super.addAll(values, xAttrs_);
onChanged();
} else {
xAttrsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder clearXAttrs() {
if (xAttrsBuilder_ == null) {
xAttrs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
xAttrsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public Builder removeXAttrs(int index) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.remove(index);
onChanged();
} else {
xAttrsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder getXAttrsBuilder(
int index) {
return getXAttrsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index) {
if (xAttrsBuilder_ == null) {
return xAttrs_.get(index); } else {
return xAttrsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList() {
if (xAttrsBuilder_ != null) {
return xAttrsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(xAttrs_);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder addXAttrsBuilder() {
return getXAttrsFieldBuilder().addBuilder(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder addXAttrsBuilder(
int index) {
return getXAttrsFieldBuilder().addBuilder(
index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 2;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder>
getXAttrsBuilderList() {
return getXAttrsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsFieldBuilder() {
if (xAttrsBuilder_ == null) {
xAttrsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>(
xAttrs_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
xAttrs_ = null;
}
return xAttrsBuilder_;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetXAttrsRequestProto)
}
static {
defaultInstance = new GetXAttrsRequestProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetXAttrsRequestProto)
}
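// --- Illustrative usage sketch (not part of the generated file) ---
// Building and re-parsing a GetXAttrsRequestProto with the builder methods declared above.
// The path "/user/example/file" and the xattr name "user.tag" are made-up example values,
// and XAttrNamespaceProto.USER is assumed from the xattr.proto namespace enum.
//
//   GetXAttrsRequestProto request = GetXAttrsRequestProto.newBuilder()
//       .setSrc("/user/example/file")                        // required string src = 1
//       .addXAttrs(XAttrProto.newBuilder()
//           .setNamespace(XAttrProto.XAttrNamespaceProto.USER)
//           .setName("user.tag"))                            // repeated XAttrProto xAttrs = 2
//       .build();                                            // build() checks required fields
//   byte[] wire = request.toByteArray();                     // serialize to bytes
//   GetXAttrsRequestProto parsed = GetXAttrsRequestProto.parseFrom(wire);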
public interface GetXAttrsResponseProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>
getXAttrsList();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index);
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
int getXAttrsCount();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.hdfs.GetXAttrsResponseProto}
*/
public static final class GetXAttrsResponseProto extends
com.google.protobuf.GeneratedMessage
implements GetXAttrsResponseProtoOrBuilder {
// Use GetXAttrsResponseProto.newBuilder() to construct.
private GetXAttrsResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private GetXAttrsResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final GetXAttrsResponseProto defaultInstance;
public static GetXAttrsResponseProto getDefaultInstance() {
return defaultInstance;
}
public GetXAttrsResponseProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private GetXAttrsResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>();
mutable_bitField0_ |= 0x00000001;
}
xAttrs_.add(input.readMessage(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto.Builder.class);
}
public static com.google.protobuf.Parser<GetXAttrsResponseProto> PARSER =
new com.google.protobuf.AbstractParser<GetXAttrsResponseProto>() {
public GetXAttrsResponseProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetXAttrsResponseProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<GetXAttrsResponseProto> getParserForType() {
return PARSER;
}
// repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
public static final int XATTRS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> xAttrs_;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> getXAttrsList() {
return xAttrs_;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList() {
return xAttrs_;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public int getXAttrsCount() {
return xAttrs_.size();
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index) {
return xAttrs_.get(index);
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index) {
return xAttrs_.get(index);
}
private void initFields() {
xAttrs_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getXAttrsCount(); i++) {
if (!getXAttrs(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < xAttrs_.size(); i++) {
output.writeMessage(1, xAttrs_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < xAttrs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, xAttrs_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto) obj;
boolean result = true;
result = result && getXAttrsList()
.equals(other.getXAttrsList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getXAttrsCount() > 0) {
hash = (37 * hash) + XATTRS_FIELD_NUMBER;
hash = (53 * hash) + getXAttrsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.GetXAttrsResponseProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getXAttrsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (xAttrsBuilder_ == null) {
xAttrs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
xAttrsBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_GetXAttrsResponseProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto(this);
int from_bitField0_ = bitField0_;
if (xAttrsBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.xAttrs_ = xAttrs_;
} else {
result.xAttrs_ = xAttrsBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto.getDefaultInstance()) return this;
if (xAttrsBuilder_ == null) {
if (!other.xAttrs_.isEmpty()) {
if (xAttrs_.isEmpty()) {
xAttrs_ = other.xAttrs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureXAttrsIsMutable();
xAttrs_.addAll(other.xAttrs_);
}
onChanged();
}
} else {
if (!other.xAttrs_.isEmpty()) {
if (xAttrsBuilder_.isEmpty()) {
xAttrsBuilder_.dispose();
xAttrsBuilder_ = null;
xAttrs_ = other.xAttrs_;
bitField0_ = (bitField0_ & ~0x00000001);
xAttrsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getXAttrsFieldBuilder() : null;
} else {
xAttrsBuilder_.addAllMessages(other.xAttrs_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
for (int i = 0; i < getXAttrsCount(); i++) {
if (!getXAttrs(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> xAttrs_ =
java.util.Collections.emptyList();
private void ensureXAttrsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>(xAttrs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder> xAttrsBuilder_;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> getXAttrsList() {
if (xAttrsBuilder_ == null) {
return java.util.Collections.unmodifiableList(xAttrs_);
} else {
return xAttrsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public int getXAttrsCount() {
if (xAttrsBuilder_ == null) {
return xAttrs_.size();
} else {
return xAttrsBuilder_.getCount();
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index) {
if (xAttrsBuilder_ == null) {
return xAttrs_.get(index);
} else {
return xAttrsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder setXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.set(index, value);
onChanged();
} else {
xAttrsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder setXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.set(index, builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.add(value);
onChanged();
} else {
xAttrsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.add(index, value);
onChanged();
} else {
xAttrsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.add(builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.add(index, builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addAllXAttrs(
java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> values) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
super.addAll(values, xAttrs_);
onChanged();
} else {
xAttrsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder clearXAttrs() {
if (xAttrsBuilder_ == null) {
xAttrs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
xAttrsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder removeXAttrs(int index) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.remove(index);
onChanged();
} else {
xAttrsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder getXAttrsBuilder(
int index) {
return getXAttrsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index) {
if (xAttrsBuilder_ == null) {
return xAttrs_.get(index); } else {
return xAttrsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList() {
if (xAttrsBuilder_ != null) {
return xAttrsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(xAttrs_);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder addXAttrsBuilder() {
return getXAttrsFieldBuilder().addBuilder(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder addXAttrsBuilder(
int index) {
return getXAttrsFieldBuilder().addBuilder(
index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder>
getXAttrsBuilderList() {
return getXAttrsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsFieldBuilder() {
if (xAttrsBuilder_ == null) {
xAttrsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>(
xAttrs_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
xAttrs_ = null;
}
return xAttrsBuilder_;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.GetXAttrsResponseProto)
}
static {
defaultInstance = new GetXAttrsResponseProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.GetXAttrsResponseProto)
}
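// --- Illustrative usage sketch (not part of the generated file) ---
// Reading the repeated xAttrs field back out of a GetXAttrsResponseProto. The "wire"
// byte array is a hypothetical serialized response; the accessors used are the
// getXAttrsCount()/getXAttrs(index) pair declared in this class, and getName()/getValue()
// come from XAttrProto.
//
//   GetXAttrsResponseProto response = GetXAttrsResponseProto.parseFrom(wire);
//   for (int i = 0; i < response.getXAttrsCount(); i++) {
//     XAttrProto attr = response.getXAttrs(i);
//     System.out.println(attr.getName() + " -> " + attr.getValue().toStringUtf8());
//   }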
public interface ListXAttrsRequestProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string src = 1;
/**
* required string src = 1;
*/
boolean hasSrc();
/**
* required string src = 1;
*/
java.lang.String getSrc();
/**
* required string src = 1;
*/
com.google.protobuf.ByteString
getSrcBytes();
}
/**
* Protobuf type {@code hadoop.hdfs.ListXAttrsRequestProto}
*/
public static final class ListXAttrsRequestProto extends
com.google.protobuf.GeneratedMessage
implements ListXAttrsRequestProtoOrBuilder {
// Use ListXAttrsRequestProto.newBuilder() to construct.
private ListXAttrsRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ListXAttrsRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ListXAttrsRequestProto defaultInstance;
public static ListXAttrsRequestProto getDefaultInstance() {
return defaultInstance;
}
public ListXAttrsRequestProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ListXAttrsRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
src_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto.Builder.class);
}
public static com.google.protobuf.Parser<ListXAttrsRequestProto> PARSER =
new com.google.protobuf.AbstractParser<ListXAttrsRequestProto>() {
public ListXAttrsRequestProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ListXAttrsRequestProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ListXAttrsRequestProto> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string src = 1;
public static final int SRC_FIELD_NUMBER = 1;
private java.lang.Object src_;
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
src_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasSrc()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getSrcBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getSrcBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto) obj;
boolean result = true;
result = result && (hasSrc() == other.hasSrc());
if (hasSrc()) {
result = result && getSrc()
.equals(other.getSrc());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSrc()) {
hash = (37 * hash) + SRC_FIELD_NUMBER;
hash = (53 * hash) + getSrc().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.ListXAttrsRequestProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
src_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsRequestProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.src_ = src_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto.getDefaultInstance()) return this;
if (other.hasSrc()) {
bitField0_ |= 0x00000001;
src_ = other.src_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasSrc()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string src = 1;
private java.lang.Object src_ = "";
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
src_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string src = 1;
*/
public Builder setSrc(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder clearSrc() {
bitField0_ = (bitField0_ & ~0x00000001);
src_ = getDefaultInstance().getSrc();
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder setSrcBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.ListXAttrsRequestProto)
}
static {
defaultInstance = new ListXAttrsRequestProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.ListXAttrsRequestProto)
}
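// Illustrative usage (not part of the generated file): a ListXAttrsRequestProto is normally
// assembled through the generated builder, for example
//   ListXAttrsRequestProto req = ListXAttrsRequestProto.newBuilder()
//       .setSrc("/user/example/file")   // hypothetical HDFS path
//       .build();
// build() throws an uninitialized-message exception if the required 'src' field is unset.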
public interface ListXAttrsResponseProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>
getXAttrsList();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index);
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
int getXAttrsCount();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList();
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index);
}
/**
* Protobuf type {@code hadoop.hdfs.ListXAttrsResponseProto}
*/
public static final class ListXAttrsResponseProto extends
com.google.protobuf.GeneratedMessage
implements ListXAttrsResponseProtoOrBuilder {
// Use ListXAttrsResponseProto.newBuilder() to construct.
private ListXAttrsResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ListXAttrsResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ListXAttrsResponseProto defaultInstance;
public static ListXAttrsResponseProto getDefaultInstance() {
return defaultInstance;
}
public ListXAttrsResponseProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ListXAttrsResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
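// Tag 10 = (field number 1 << 3) | wire type 2 (length-delimited): one serialized XAttrProto element.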
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>();
mutable_bitField0_ |= 0x00000001;
}
xAttrs_.add(input.readMessage(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto.Builder.class);
}
public static com.google.protobuf.Parser<ListXAttrsResponseProto> PARSER =
new com.google.protobuf.AbstractParser<ListXAttrsResponseProto>() {
public ListXAttrsResponseProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ListXAttrsResponseProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ListXAttrsResponseProto> getParserForType() {
return PARSER;
}
// repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
public static final int XATTRS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> xAttrs_;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> getXAttrsList() {
return xAttrs_;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList() {
return xAttrs_;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public int getXAttrsCount() {
return xAttrs_.size();
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index) {
return xAttrs_.get(index);
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index) {
return xAttrs_.get(index);
}
private void initFields() {
xAttrs_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
for (int i = 0; i < getXAttrsCount(); i++) {
if (!getXAttrs(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < xAttrs_.size(); i++) {
output.writeMessage(1, xAttrs_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < xAttrs_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, xAttrs_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto) obj;
boolean result = true;
result = result && getXAttrsList()
.equals(other.getXAttrsList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getXAttrsCount() > 0) {
hash = (37 * hash) + XATTRS_FIELD_NUMBER;
hash = (53 * hash) + getXAttrsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.ListXAttrsResponseProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getXAttrsFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (xAttrsBuilder_ == null) {
xAttrs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
xAttrsBuilder_.clear();
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_ListXAttrsResponseProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto(this);
int from_bitField0_ = bitField0_;
if (xAttrsBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = java.util.Collections.unmodifiableList(xAttrs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.xAttrs_ = xAttrs_;
} else {
result.xAttrs_ = xAttrsBuilder_.build();
}
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto.getDefaultInstance()) return this;
if (xAttrsBuilder_ == null) {
if (!other.xAttrs_.isEmpty()) {
if (xAttrs_.isEmpty()) {
xAttrs_ = other.xAttrs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureXAttrsIsMutable();
xAttrs_.addAll(other.xAttrs_);
}
onChanged();
}
} else {
if (!other.xAttrs_.isEmpty()) {
if (xAttrsBuilder_.isEmpty()) {
xAttrsBuilder_.dispose();
xAttrsBuilder_ = null;
xAttrs_ = other.xAttrs_;
bitField0_ = (bitField0_ & ~0x00000001);
xAttrsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getXAttrsFieldBuilder() : null;
} else {
xAttrsBuilder_.addAllMessages(other.xAttrs_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
for (int i = 0; i < getXAttrsCount(); i++) {
if (!getXAttrs(i).isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
private java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> xAttrs_ =
java.util.Collections.emptyList();
private void ensureXAttrsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
xAttrs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto>(xAttrs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder> xAttrsBuilder_;
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> getXAttrsList() {
if (xAttrsBuilder_ == null) {
return java.util.Collections.unmodifiableList(xAttrs_);
} else {
return xAttrsBuilder_.getMessageList();
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public int getXAttrsCount() {
if (xAttrsBuilder_ == null) {
return xAttrs_.size();
} else {
return xAttrsBuilder_.getCount();
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttrs(int index) {
if (xAttrsBuilder_ == null) {
return xAttrs_.get(index);
} else {
return xAttrsBuilder_.getMessage(index);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder setXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.set(index, value);
onChanged();
} else {
xAttrsBuilder_.setMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder setXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.set(index, builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.add(value);
onChanged();
} else {
xAttrsBuilder_.addMessage(value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureXAttrsIsMutable();
xAttrs_.add(index, value);
onChanged();
} else {
xAttrsBuilder_.addMessage(index, value);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.add(builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addXAttrs(
int index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.add(index, builderForValue.build());
onChanged();
} else {
xAttrsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder addAllXAttrs(
java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto> values) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
super.addAll(values, xAttrs_);
onChanged();
} else {
xAttrsBuilder_.addAllMessages(values);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder clearXAttrs() {
if (xAttrsBuilder_ == null) {
xAttrs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
xAttrsBuilder_.clear();
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public Builder removeXAttrs(int index) {
if (xAttrsBuilder_ == null) {
ensureXAttrsIsMutable();
xAttrs_.remove(index);
onChanged();
} else {
xAttrsBuilder_.remove(index);
}
return this;
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder getXAttrsBuilder(
int index) {
return getXAttrsFieldBuilder().getBuilder(index);
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrsOrBuilder(
int index) {
if (xAttrsBuilder_ == null) {
return xAttrs_.get(index); } else {
return xAttrsBuilder_.getMessageOrBuilder(index);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsOrBuilderList() {
if (xAttrsBuilder_ != null) {
return xAttrsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(xAttrs_);
}
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder addXAttrsBuilder() {
return getXAttrsFieldBuilder().addBuilder(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder addXAttrsBuilder(
int index) {
return getXAttrsFieldBuilder().addBuilder(
index, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance());
}
/**
* repeated .hadoop.hdfs.XAttrProto xAttrs = 1;
*/
public java.util.List<org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder>
getXAttrsBuilderList() {
return getXAttrsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrsFieldBuilder() {
if (xAttrsBuilder_ == null) {
xAttrsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>(
xAttrs_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
isClean());
xAttrs_ = null;
}
return xAttrsBuilder_;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.ListXAttrsResponseProto)
}
static {
defaultInstance = new ListXAttrsResponseProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.ListXAttrsResponseProto)
}
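// Illustrative usage (not part of the generated file): the response carries a repeated
// XAttrProto field that can be read back after parsing, for example
//   ListXAttrsResponseProto resp = ListXAttrsResponseProto.parseFrom(data); // 'data' is a hypothetical byte[]
//   for (XAttrProto attr : resp.getXAttrsList()) {
//     // attr.getNamespace(), attr.getName(), attr.getValue()
//   }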
public interface RemoveXAttrRequestProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string src = 1;
/**
* required string src = 1;
*/
boolean hasSrc();
/**
* required string src = 1;
*/
java.lang.String getSrc();
/**
* required string src = 1;
*/
com.google.protobuf.ByteString
getSrcBytes();
// optional .hadoop.hdfs.XAttrProto xAttr = 2;
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
boolean hasXAttr();
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttr();
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrOrBuilder();
}
/**
* Protobuf type {@code hadoop.hdfs.RemoveXAttrRequestProto}
*/
public static final class RemoveXAttrRequestProto extends
com.google.protobuf.GeneratedMessage
implements RemoveXAttrRequestProtoOrBuilder {
// Use RemoveXAttrRequestProto.newBuilder() to construct.
private RemoveXAttrRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RemoveXAttrRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RemoveXAttrRequestProto defaultInstance;
public static RemoveXAttrRequestProto getDefaultInstance() {
return defaultInstance;
}
public RemoveXAttrRequestProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RemoveXAttrRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
src_ = input.readBytes();
break;
}
case 18: {
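// Tag 18 = (field number 2 << 3) | wire type 2: the optional xAttr message; if one was already read, merge into it.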
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder subBuilder = null;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = xAttr_.toBuilder();
}
xAttr_ = input.readMessage(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(xAttr_);
xAttr_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000002;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto.Builder.class);
}
public static com.google.protobuf.Parser<RemoveXAttrRequestProto> PARSER =
new com.google.protobuf.AbstractParser<RemoveXAttrRequestProto>() {
public RemoveXAttrRequestProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RemoveXAttrRequestProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RemoveXAttrRequestProto> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string src = 1;
public static final int SRC_FIELD_NUMBER = 1;
private java.lang.Object src_;
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
src_ = s;
}
return s;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional .hadoop.hdfs.XAttrProto xAttr = 2;
public static final int XATTR_FIELD_NUMBER = 2;
private org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto xAttr_;
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public boolean hasXAttr() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttr() {
return xAttr_;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrOrBuilder() {
return xAttr_;
}
private void initFields() {
src_ = "";
xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasSrc()) {
memoizedIsInitialized = 0;
return false;
}
if (hasXAttr()) {
if (!getXAttr().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getSrcBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, xAttr_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getSrcBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, xAttr_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto) obj;
boolean result = true;
result = result && (hasSrc() == other.hasSrc());
if (hasSrc()) {
result = result && getSrc()
.equals(other.getSrc());
}
result = result && (hasXAttr() == other.hasXAttr());
if (hasXAttr()) {
result = result && getXAttr()
.equals(other.getXAttr());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSrc()) {
hash = (37 * hash) + SRC_FIELD_NUMBER;
hash = (53 * hash) + getSrc().hashCode();
}
if (hasXAttr()) {
hash = (37 * hash) + XATTR_FIELD_NUMBER;
hash = (53 * hash) + getXAttr().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.RemoveXAttrRequestProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrRequestProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getXAttrFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
src_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (xAttrBuilder_ == null) {
xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
} else {
xAttrBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrRequestProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto.getDefaultInstance();
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.src_ = src_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
if (xAttrBuilder_ == null) {
result.xAttr_ = xAttr_;
} else {
result.xAttr_ = xAttrBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto.getDefaultInstance()) return this;
if (other.hasSrc()) {
bitField0_ |= 0x00000001;
src_ = other.src_;
onChanged();
}
if (other.hasXAttr()) {
mergeXAttr(other.getXAttr());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasSrc()) {
return false;
}
if (hasXAttr()) {
if (!getXAttr().isInitialized()) {
return false;
}
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrRequestProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string src = 1;
private java.lang.Object src_ = "";
/**
* required string src = 1;
*/
public boolean hasSrc() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* required string src = 1;
*/
public java.lang.String getSrc() {
java.lang.Object ref = src_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
src_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string src = 1;
*/
public com.google.protobuf.ByteString
getSrcBytes() {
java.lang.Object ref = src_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
src_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* required string src = 1;
*/
public Builder setSrc(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder clearSrc() {
bitField0_ = (bitField0_ & ~0x00000001);
src_ = getDefaultInstance().getSrc();
onChanged();
return this;
}
/**
* required string src = 1;
*/
public Builder setSrcBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
src_ = value;
onChanged();
return this;
}
// optional .hadoop.hdfs.XAttrProto xAttr = 2;
private org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder> xAttrBuilder_;
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public boolean hasXAttr() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto getXAttr() {
if (xAttrBuilder_ == null) {
return xAttr_;
} else {
return xAttrBuilder_.getMessage();
}
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder setXAttr(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
xAttr_ = value;
onChanged();
} else {
xAttrBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder setXAttr(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder builderForValue) {
if (xAttrBuilder_ == null) {
xAttr_ = builderForValue.build();
onChanged();
} else {
xAttrBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder mergeXAttr(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto value) {
if (xAttrBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
xAttr_ != org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance()) {
xAttr_ =
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.newBuilder(xAttr_).mergeFrom(value).buildPartial();
} else {
xAttr_ = value;
}
onChanged();
} else {
xAttrBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000002;
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public Builder clearXAttr() {
if (xAttrBuilder_ == null) {
xAttr_ = org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.getDefaultInstance();
onChanged();
} else {
xAttrBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder getXAttrBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getXAttrFieldBuilder().getBuilder();
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder getXAttrOrBuilder() {
if (xAttrBuilder_ != null) {
return xAttrBuilder_.getMessageOrBuilder();
} else {
return xAttr_;
}
}
/**
* optional .hadoop.hdfs.XAttrProto xAttr = 2;
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>
getXAttrFieldBuilder() {
if (xAttrBuilder_ == null) {
xAttrBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.Builder, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProtoOrBuilder>(
xAttr_,
getParentForChildren(),
isClean());
xAttr_ = null;
}
return xAttrBuilder_;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.RemoveXAttrRequestProto)
}
static {
defaultInstance = new RemoveXAttrRequestProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.RemoveXAttrRequestProto)
}
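// Illustrative usage (not part of the generated file): 'src' is required while the xAttr
// message is optional, for example
//   RemoveXAttrRequestProto req = RemoveXAttrRequestProto.newBuilder()
//       .setSrc("/user/example/file")                 // hypothetical HDFS path
//       .setXAttr(XAttrProto.newBuilder()
//           .setNamespace(XAttrProto.XAttrNamespaceProto.USER) // assumes the USER value of the nested namespace enum
//           .setName("exampleAttr")                   // hypothetical attribute name
//           .build())
//       .build();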
public interface RemoveXAttrResponseProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
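// RemoveXAttrResponseProto below declares no fields; it only acknowledges a remove-xattr call.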
/**
* Protobuf type {@code hadoop.hdfs.RemoveXAttrResponseProto}
*/
public static final class RemoveXAttrResponseProto extends
com.google.protobuf.GeneratedMessage
implements RemoveXAttrResponseProtoOrBuilder {
// Use RemoveXAttrResponseProto.newBuilder() to construct.
private RemoveXAttrResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RemoveXAttrResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RemoveXAttrResponseProto defaultInstance;
public static RemoveXAttrResponseProto getDefaultInstance() {
return defaultInstance;
}
public RemoveXAttrResponseProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RemoveXAttrResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto.Builder.class);
}
public static com.google.protobuf.Parser<RemoveXAttrResponseProto> PARSER =
new com.google.protobuf.AbstractParser<RemoveXAttrResponseProto>() {
public RemoveXAttrResponseProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RemoveXAttrResponseProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RemoveXAttrResponseProto> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
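// Illustrative round trip (a sketch, not part of the generated output): the empty
// response can be built, serialized, and re-parsed using the members declared above.
//
//   RemoveXAttrResponseProto resp = RemoveXAttrResponseProto.newBuilder().build();
//   byte[] bytes = resp.toByteArray();
//   RemoveXAttrResponseProto again = RemoveXAttrResponseProto.parseFrom(bytes);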
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.hdfs.RemoveXAttrResponseProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto.class, org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.internal_static_hadoop_hdfs_RemoveXAttrResponseProto_descriptor;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto.getDefaultInstance();
}
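// build() would throw only for missing required fields, and this message has none;
// buildPartial() simply allocates a fresh instance and calls onBuilt().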
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto build() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto buildPartial() {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto) {
return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto other) {
if (other == org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.RemoveXAttrResponseProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// @@protoc_insertion_point(builder_scope:hadoop.hdfs.RemoveXAttrResponseProto)
}
static {
defaultInstance = new RemoveXAttrResponseProto(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:hadoop.hdfs.RemoveXAttrResponseProto)
}
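// Descriptor and FieldAccessorTable handles for each message type defined in
// xattr.proto; they are assigned by the static initializer at the bottom of this class.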
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_XAttrProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_XAttrProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_SetXAttrRequestProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_SetXAttrRequestProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_SetXAttrResponseProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_SetXAttrResponseProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetXAttrsRequestProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_GetXAttrsRequestProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_GetXAttrsResponseProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_GetXAttrsResponseProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_ListXAttrsRequestProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_ListXAttrsRequestProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_ListXAttrsResponseProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_ListXAttrsResponseProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_RemoveXAttrRequestProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_RemoveXAttrRequestProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_hadoop_hdfs_RemoveXAttrResponseProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hadoop_hdfs_RemoveXAttrResponseProto_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
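// descriptorData below is the serialized FileDescriptorProto for xattr.proto. The
// InternalDescriptorAssigner wires up the per-message descriptors and field accessor
// tables declared above once the file descriptor is built; the empty FileDescriptor[]
// passed to internalBuildGeneratedFileFrom reflects that xattr.proto has no imports.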
static {
java.lang.String[] descriptorData = {
"\n\013xattr.proto\022\013hadoop.hdfs\"\272\001\n\nXAttrProt" +
"o\022>\n\tnamespace\030\001 \002(\0162+.hadoop.hdfs.XAttr" +
"Proto.XAttrNamespaceProto\022\014\n\004name\030\002 \002(\t\022" +
"\r\n\005value\030\003 \001(\014\"O\n\023XAttrNamespaceProto\022\010\n" +
"\004USER\020\000\022\013\n\007TRUSTED\020\001\022\014\n\010SECURITY\020\002\022\n\n\006SY" +
"STEM\020\003\022\007\n\003RAW\020\004\"Y\n\024SetXAttrRequestProto\022" +
"\013\n\003src\030\001 \002(\t\022&\n\005xAttr\030\002 \001(\0132\027.hadoop.hdf" +
"s.XAttrProto\022\014\n\004flag\030\003 \001(\r\"\027\n\025SetXAttrRe" +
"sponseProto\"M\n\025GetXAttrsRequestProto\022\013\n\003" +
"src\030\001 \002(\t\022\'\n\006xAttrs\030\002 \003(\0132\027.hadoop.hdfs.",
"XAttrProto\"A\n\026GetXAttrsResponseProto\022\'\n\006" +
"xAttrs\030\001 \003(\0132\027.hadoop.hdfs.XAttrProto\"%\n" +
"\026ListXAttrsRequestProto\022\013\n\003src\030\001 \002(\t\"B\n\027" +
"ListXAttrsResponseProto\022\'\n\006xAttrs\030\001 \003(\0132" +
"\027.hadoop.hdfs.XAttrProto\"N\n\027RemoveXAttrR" +
"equestProto\022\013\n\003src\030\001 \002(\t\022&\n\005xAttr\030\002 \001(\0132" +
"\027.hadoop.hdfs.XAttrProto\"\032\n\030RemoveXAttrR" +
"esponseProto*8\n\021XAttrSetFlagProto\022\020\n\014XAT" +
"TR_CREATE\020\001\022\021\n\rXATTR_REPLACE\020\002B7\n%org.ap" +
"ache.hadoop.hdfs.protocol.protoB\013XAttrPr",
"otos\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_hadoop_hdfs_XAttrProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_hdfs_XAttrProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_XAttrProto_descriptor,
new java.lang.String[] { "Namespace", "Name", "Value", });
internal_static_hadoop_hdfs_SetXAttrRequestProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_hdfs_SetXAttrRequestProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_SetXAttrRequestProto_descriptor,
new java.lang.String[] { "Src", "XAttr", "Flag", });
internal_static_hadoop_hdfs_SetXAttrResponseProto_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_hadoop_hdfs_SetXAttrResponseProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_SetXAttrResponseProto_descriptor,
new java.lang.String[] { });
internal_static_hadoop_hdfs_GetXAttrsRequestProto_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_hadoop_hdfs_GetXAttrsRequestProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_GetXAttrsRequestProto_descriptor,
new java.lang.String[] { "Src", "XAttrs", });
internal_static_hadoop_hdfs_GetXAttrsResponseProto_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_hadoop_hdfs_GetXAttrsResponseProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_GetXAttrsResponseProto_descriptor,
new java.lang.String[] { "XAttrs", });
internal_static_hadoop_hdfs_ListXAttrsRequestProto_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_hadoop_hdfs_ListXAttrsRequestProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_ListXAttrsRequestProto_descriptor,
new java.lang.String[] { "Src", });
internal_static_hadoop_hdfs_ListXAttrsResponseProto_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_hadoop_hdfs_ListXAttrsResponseProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_ListXAttrsResponseProto_descriptor,
new java.lang.String[] { "XAttrs", });
internal_static_hadoop_hdfs_RemoveXAttrRequestProto_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_hadoop_hdfs_RemoveXAttrRequestProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_RemoveXAttrRequestProto_descriptor,
new java.lang.String[] { "Src", "XAttr", });
internal_static_hadoop_hdfs_RemoveXAttrResponseProto_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_hadoop_hdfs_RemoveXAttrResponseProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hadoop_hdfs_RemoveXAttrResponseProto_descriptor,
new java.lang.String[] { });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}