
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: hdfs.proto

package org.apache.hadoop.hdfs.protocol.proto;

public final class HdfsProtos {
  private HdfsProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public enum ChecksumTypeProto
      implements com.google.protobuf.ProtocolMessageEnum {
    CHECKSUM_NULL(0, 0),
    CHECKSUM_CRC32(1, 1),
    CHECKSUM_CRC32C(2, 2),
    ;
    
    public static final int CHECKSUM_NULL_VALUE = 0;
    public static final int CHECKSUM_CRC32_VALUE = 1;
    public static final int CHECKSUM_CRC32C_VALUE = 2;
    
    
    public final int getNumber() { return value; }
    
    public static ChecksumTypeProto valueOf(int value) {
      switch (value) {
        case 0: return CHECKSUM_NULL;
        case 1: return CHECKSUM_CRC32;
        case 2: return CHECKSUM_CRC32C;
        default: return null;
      }
    }
    
    public static com.google.protobuf.Internal.EnumLiteMap<ChecksumTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<ChecksumTypeProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<ChecksumTypeProto>() {
            public ChecksumTypeProto findValueByNumber(int number) {
              return ChecksumTypeProto.valueOf(number);
            }
          };
    
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.getDescriptor().getEnumTypes().get(0);
    }
    
    private static final ChecksumTypeProto[] VALUES = {
      CHECKSUM_NULL, CHECKSUM_CRC32, CHECKSUM_CRC32C, 
    };
    
    public static ChecksumTypeProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }
    
    private final int index;
    private final int value;
    
    private ChecksumTypeProto(int index, int value) {
      this.index = index;
      this.value = value;
    }
    
    // @@protoc_insertion_point(enum_scope:ChecksumTypeProto)
  }
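  
  // Usage sketch (illustrative, not part of the generated source): converting
  // between wire numbers and enum constants. valueOf(int) returns null for
  // numbers not defined above, so callers should check for null.
  //
  //   HdfsProtos.ChecksumTypeProto t = HdfsProtos.ChecksumTypeProto.valueOf(2);   // CHECKSUM_CRC32C
  //   int wire = HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32.getNumber();         // 1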
  
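  // Replica states reported by a datanode (names per the HDFS design):
  // FINALIZED - complete replica; RBW - replica being written; RWR - replica
  // waiting to be recovered; RUR - replica under recovery; TEMPORARY - short-lived
  // replica created for block transfer and discarded on failure.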
  public enum ReplicaStateProto
      implements com.google.protobuf.ProtocolMessageEnum {
    FINALIZED(0, 0),
    RBW(1, 1),
    RWR(2, 2),
    RUR(3, 3),
    TEMPORARY(4, 4),
    ;
    
    public static final int FINALIZED_VALUE = 0;
    public static final int RBW_VALUE = 1;
    public static final int RWR_VALUE = 2;
    public static final int RUR_VALUE = 3;
    public static final int TEMPORARY_VALUE = 4;
    
    
    public final int getNumber() { return value; }
    
    public static ReplicaStateProto valueOf(int value) {
      switch (value) {
        case 0: return FINALIZED;
        case 1: return RBW;
        case 2: return RWR;
        case 3: return RUR;
        case 4: return TEMPORARY;
        default: return null;
      }
    }
    
    public static com.google.protobuf.Internal.EnumLiteMap<ReplicaStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<ReplicaStateProto>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<ReplicaStateProto>() {
            public ReplicaStateProto findValueByNumber(int number) {
              return ReplicaStateProto.valueOf(number);
            }
          };
    
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.getDescriptor().getEnumTypes().get(1);
    }
    
    private static final ReplicaStateProto[] VALUES = {
      FINALIZED, RBW, RWR, RUR, TEMPORARY, 
    };
    
    public static ReplicaStateProto valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }
    
    private final int index;
    private final int value;
    
    private ReplicaStateProto(int index, int value) {
      this.index = index;
      this.value = value;
    }
    
    // @@protoc_insertion_point(enum_scope:ReplicaStateProto)
  }
  
  public interface ExtendedBlockProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string poolId = 1;
    boolean hasPoolId();
    String getPoolId();
    
    // required uint64 blockId = 2;
    boolean hasBlockId();
    long getBlockId();
    
    // required uint64 generationStamp = 3;
    boolean hasGenerationStamp();
    long getGenerationStamp();
    
    // optional uint64 numBytes = 4 [default = 0];
    boolean hasNumBytes();
    long getNumBytes();
  }
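  // Note (not part of the generated source): both ExtendedBlockProto and its
  // Builder implement this interface, so read-only code can accept either a
  // built message or a builder still under construction.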
  public static final class ExtendedBlockProto extends
      com.google.protobuf.GeneratedMessage
      implements ExtendedBlockProtoOrBuilder {
    // Use ExtendedBlockProto.newBuilder() to construct.
    private ExtendedBlockProto(Builder builder) {
      super(builder);
    }
    private ExtendedBlockProto(boolean noInit) {}
    
    private static final ExtendedBlockProto defaultInstance;
    public static ExtendedBlockProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public ExtendedBlockProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required string poolId = 1;
    public static final int POOLID_FIELD_NUMBER = 1;
    private java.lang.Object poolId_;
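    // poolId_ holds either a String or a ByteString: bytes read from the wire are
    // decoded lazily in getPoolId(), and the decoded String is cached once it is
    // known to be valid UTF-8.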
    public boolean hasPoolId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getPoolId() {
      java.lang.Object ref = poolId_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          poolId_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getPoolIdBytes() {
      java.lang.Object ref = poolId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        poolId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required uint64 blockId = 2;
    public static final int BLOCKID_FIELD_NUMBER = 2;
    private long blockId_;
    public boolean hasBlockId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getBlockId() {
      return blockId_;
    }
    
    // required uint64 generationStamp = 3;
    public static final int GENERATIONSTAMP_FIELD_NUMBER = 3;
    private long generationStamp_;
    public boolean hasGenerationStamp() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getGenerationStamp() {
      return generationStamp_;
    }
    
    // optional uint64 numBytes = 4 [default = 0];
    public static final int NUMBYTES_FIELD_NUMBER = 4;
    private long numBytes_;
    public boolean hasNumBytes() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public long getNumBytes() {
      return numBytes_;
    }
    
    private void initFields() {
      poolId_ = "";
      blockId_ = 0L;
      generationStamp_ = 0L;
      numBytes_ = 0L;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasPoolId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBlockId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasGenerationStamp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getPoolIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, blockId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, generationStamp_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, numBytes_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getPoolIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, blockId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, generationStamp_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, numBytes_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
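    // The computed size is cached in memoizedSerializedSize and reused on later
    // calls; writeTo() above calls getSerializedSize() first so the cache is
    // populated before the fields are written.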
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto) obj;
      
      boolean result = true;
      result = result && (hasPoolId() == other.hasPoolId());
      if (hasPoolId()) {
        result = result && getPoolId()
            .equals(other.getPoolId());
      }
      result = result && (hasBlockId() == other.hasBlockId());
      if (hasBlockId()) {
        result = result && (getBlockId()
            == other.getBlockId());
      }
      result = result && (hasGenerationStamp() == other.hasGenerationStamp());
      if (hasGenerationStamp()) {
        result = result && (getGenerationStamp()
            == other.getGenerationStamp());
      }
      result = result && (hasNumBytes() == other.hasNumBytes());
      if (hasNumBytes()) {
        result = result && (getNumBytes()
            == other.getNumBytes());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPoolId()) {
        hash = (37 * hash) + POOLID_FIELD_NUMBER;
        hash = (53 * hash) + getPoolId().hashCode();
      }
      if (hasBlockId()) {
        hash = (37 * hash) + BLOCKID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBlockId());
      }
      if (hasGenerationStamp()) {
        hash = (37 * hash) + GENERATIONSTAMP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getGenerationStamp());
      }
      if (hasNumBytes()) {
        hash = (37 * hash) + NUMBYTES_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNumBytes());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        poolId_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        blockId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        generationStamp_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        numBytes_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.poolId_ = poolId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.blockId_ = blockId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.generationStamp_ = generationStamp_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.numBytes_ = numBytes_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance()) return this;
        if (other.hasPoolId()) {
          setPoolId(other.getPoolId());
        }
        if (other.hasBlockId()) {
          setBlockId(other.getBlockId());
        }
        if (other.hasGenerationStamp()) {
          setGenerationStamp(other.getGenerationStamp());
        }
        if (other.hasNumBytes()) {
          setNumBytes(other.getNumBytes());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasPoolId()) {
          
          return false;
        }
        if (!hasBlockId()) {
          
          return false;
        }
        if (!hasGenerationStamp()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              poolId_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              blockId_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              generationStamp_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              numBytes_ = input.readUInt64();
              break;
            }
          }
        }
      }
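      // Note on the case labels above: a protobuf tag is (fieldNumber << 3) | wireType,
      // so 10 is field 1 (length-delimited) and 16, 24, 32 are fields 2-4 (varint).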
      
      private int bitField0_;
      
      // required string poolId = 1;
      private java.lang.Object poolId_ = "";
      public boolean hasPoolId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getPoolId() {
        java.lang.Object ref = poolId_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          poolId_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setPoolId(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        poolId_ = value;
        onChanged();
        return this;
      }
      public Builder clearPoolId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        poolId_ = getDefaultInstance().getPoolId();
        onChanged();
        return this;
      }
      void setPoolId(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        poolId_ = value;
        onChanged();
      }
      
      // required uint64 blockId = 2;
      private long blockId_ ;
      public boolean hasBlockId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getBlockId() {
        return blockId_;
      }
      public Builder setBlockId(long value) {
        bitField0_ |= 0x00000002;
        blockId_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        blockId_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 generationStamp = 3;
      private long generationStamp_ ;
      public boolean hasGenerationStamp() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getGenerationStamp() {
        return generationStamp_;
      }
      public Builder setGenerationStamp(long value) {
        bitField0_ |= 0x00000004;
        generationStamp_ = value;
        onChanged();
        return this;
      }
      public Builder clearGenerationStamp() {
        bitField0_ = (bitField0_ & ~0x00000004);
        generationStamp_ = 0L;
        onChanged();
        return this;
      }
      
      // optional uint64 numBytes = 4 [default = 0];
      private long numBytes_ ;
      public boolean hasNumBytes() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public long getNumBytes() {
        return numBytes_;
      }
      public Builder setNumBytes(long value) {
        bitField0_ |= 0x00000008;
        numBytes_ = value;
        onChanged();
        return this;
      }
      public Builder clearNumBytes() {
        bitField0_ = (bitField0_ & ~0x00000008);
        numBytes_ = 0L;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:ExtendedBlockProto)
    }
    
    static {
      defaultInstance = new ExtendedBlockProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:ExtendedBlockProto)
  }
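  
  // Round-trip sketch (illustrative; the pool id and numbers below are
  // hypothetical): the three required fields must be set before build()
  // succeeds, while numBytes is optional.
  //
  //   HdfsProtos.ExtendedBlockProto block = HdfsProtos.ExtendedBlockProto.newBuilder()
  //       .setPoolId("BP-example")
  //       .setBlockId(1073741825L)
  //       .setGenerationStamp(1001L)
  //       .build();
  //   byte[] bytes = block.toByteArray();
  //   HdfsProtos.ExtendedBlockProto parsed = HdfsProtos.ExtendedBlockProto.parseFrom(bytes);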
  
  public interface BlockTokenIdentifierProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required bytes identifier = 1;
    boolean hasIdentifier();
    com.google.protobuf.ByteString getIdentifier();
    
    // required bytes password = 2;
    boolean hasPassword();
    com.google.protobuf.ByteString getPassword();
    
    // required string kind = 3;
    boolean hasKind();
    String getKind();
    
    // required string service = 4;
    boolean hasService();
    String getService();
  }
  public static final class BlockTokenIdentifierProto extends
      com.google.protobuf.GeneratedMessage
      implements BlockTokenIdentifierProtoOrBuilder {
    // Use BlockTokenIdentifierProto.newBuilder() to construct.
    private BlockTokenIdentifierProto(Builder builder) {
      super(builder);
    }
    private BlockTokenIdentifierProto(boolean noInit) {}
    
    private static final BlockTokenIdentifierProto defaultInstance;
    public static BlockTokenIdentifierProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public BlockTokenIdentifierProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required bytes identifier = 1;
    public static final int IDENTIFIER_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString identifier_;
    public boolean hasIdentifier() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public com.google.protobuf.ByteString getIdentifier() {
      return identifier_;
    }
    
    // required bytes password = 2;
    public static final int PASSWORD_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString password_;
    public boolean hasPassword() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public com.google.protobuf.ByteString getPassword() {
      return password_;
    }
    
    // required string kind = 3;
    public static final int KIND_FIELD_NUMBER = 3;
    private java.lang.Object kind_;
    public boolean hasKind() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public String getKind() {
      java.lang.Object ref = kind_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          kind_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getKindBytes() {
      java.lang.Object ref = kind_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        kind_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required string service = 4;
    public static final int SERVICE_FIELD_NUMBER = 4;
    private java.lang.Object service_;
    public boolean hasService() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public String getService() {
      java.lang.Object ref = service_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          service_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getServiceBytes() {
      java.lang.Object ref = service_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        service_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    private void initFields() {
      identifier_ = com.google.protobuf.ByteString.EMPTY;
      password_ = com.google.protobuf.ByteString.EMPTY;
      kind_ = "";
      service_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasIdentifier()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasPassword()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasKind()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasService()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, identifier_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, password_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getKindBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, getServiceBytes());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, identifier_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, password_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getKindBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getServiceBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto) obj;
      
      boolean result = true;
      result = result && (hasIdentifier() == other.hasIdentifier());
      if (hasIdentifier()) {
        result = result && getIdentifier()
            .equals(other.getIdentifier());
      }
      result = result && (hasPassword() == other.hasPassword());
      if (hasPassword()) {
        result = result && getPassword()
            .equals(other.getPassword());
      }
      result = result && (hasKind() == other.hasKind());
      if (hasKind()) {
        result = result && getKind()
            .equals(other.getKind());
      }
      result = result && (hasService() == other.hasService());
      if (hasService()) {
        result = result && getService()
            .equals(other.getService());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasIdentifier()) {
        hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getIdentifier().hashCode();
      }
      if (hasPassword()) {
        hash = (37 * hash) + PASSWORD_FIELD_NUMBER;
        hash = (53 * hash) + getPassword().hashCode();
      }
      if (hasKind()) {
        hash = (37 * hash) + KIND_FIELD_NUMBER;
        hash = (53 * hash) + getKind().hashCode();
      }
      if (hasService()) {
        hash = (37 * hash) + SERVICE_FIELD_NUMBER;
        hash = (53 * hash) + getService().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        identifier_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        password_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        kind_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        service_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.identifier_ = identifier_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.password_ = password_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.kind_ = kind_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.service_ = service_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance()) return this;
        if (other.hasIdentifier()) {
          setIdentifier(other.getIdentifier());
        }
        if (other.hasPassword()) {
          setPassword(other.getPassword());
        }
        if (other.hasKind()) {
          setKind(other.getKind());
        }
        if (other.hasService()) {
          setService(other.getService());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasIdentifier()) {
          
          return false;
        }
        if (!hasPassword()) {
          
          return false;
        }
        if (!hasKind()) {
          
          return false;
        }
        if (!hasService()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              identifier_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              password_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              kind_ = input.readBytes();
              break;
            }
            case 34: {
              bitField0_ |= 0x00000008;
              service_ = input.readBytes();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required bytes identifier = 1;
      private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasIdentifier() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public com.google.protobuf.ByteString getIdentifier() {
        return identifier_;
      }
      public Builder setIdentifier(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        identifier_ = value;
        onChanged();
        return this;
      }
      public Builder clearIdentifier() {
        bitField0_ = (bitField0_ & ~0x00000001);
        identifier_ = getDefaultInstance().getIdentifier();
        onChanged();
        return this;
      }
      
      // required bytes password = 2;
      private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasPassword() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public com.google.protobuf.ByteString getPassword() {
        return password_;
      }
      public Builder setPassword(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        password_ = value;
        onChanged();
        return this;
      }
      public Builder clearPassword() {
        bitField0_ = (bitField0_ & ~0x00000002);
        password_ = getDefaultInstance().getPassword();
        onChanged();
        return this;
      }
      
      // required string kind = 3;
      private java.lang.Object kind_ = "";
      public boolean hasKind() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public String getKind() {
        java.lang.Object ref = kind_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          kind_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setKind(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        kind_ = value;
        onChanged();
        return this;
      }
      public Builder clearKind() {
        bitField0_ = (bitField0_ & ~0x00000004);
        kind_ = getDefaultInstance().getKind();
        onChanged();
        return this;
      }
      void setKind(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000004;
        kind_ = value;
        onChanged();
      }
      
      // required string service = 4;
      private java.lang.Object service_ = "";
      public boolean hasService() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public String getService() {
        java.lang.Object ref = service_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          service_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setService(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        service_ = value;
        onChanged();
        return this;
      }
      public Builder clearService() {
        bitField0_ = (bitField0_ & ~0x00000008);
        service_ = getDefaultInstance().getService();
        onChanged();
        return this;
      }
      void setService(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000008;
        service_ = value;
        onChanged();
      }
      
      // @@protoc_insertion_point(builder_scope:BlockTokenIdentifierProto)
    }
    
    static {
      defaultInstance = new BlockTokenIdentifierProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:BlockTokenIdentifierProto)
  }
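  
  // Note (not part of the generated source): parseDelimitedFrom() reads a varint
  // length prefix followed by one message body, which lets several messages be
  // written back-to-back on a single stream; it returns null once the stream is
  // exhausted, whereas parseFrom(InputStream) consumes the stream to EOF as a
  // single message.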
  
  public interface DatanodeIDProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string ipAddr = 1;
    boolean hasIpAddr();
    String getIpAddr();
    
    // required string hostName = 2;
    boolean hasHostName();
    String getHostName();
    
    // required string storageID = 3;
    boolean hasStorageID();
    String getStorageID();
    
    // required uint32 xferPort = 4;
    boolean hasXferPort();
    int getXferPort();
    
    // required uint32 infoPort = 5;
    boolean hasInfoPort();
    int getInfoPort();
    
    // required uint32 ipcPort = 6;
    boolean hasIpcPort();
    int getIpcPort();
  }
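  // Note (not part of the generated source): xferPort is the datanode's data
  // transfer port, infoPort its HTTP/web UI port, and ipcPort its RPC server
  // port; storageID identifies the datanode's storage.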
  public static final class DatanodeIDProto extends
      com.google.protobuf.GeneratedMessage
      implements DatanodeIDProtoOrBuilder {
    // Use DatanodeIDProto.newBuilder() to construct.
    private DatanodeIDProto(Builder builder) {
      super(builder);
    }
    private DatanodeIDProto(boolean noInit) {}
    
    private static final DatanodeIDProto defaultInstance;
    public static DatanodeIDProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public DatanodeIDProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required string ipAddr = 1;
    public static final int IPADDR_FIELD_NUMBER = 1;
    private java.lang.Object ipAddr_;
    public boolean hasIpAddr() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getIpAddr() {
      java.lang.Object ref = ipAddr_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          ipAddr_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getIpAddrBytes() {
      java.lang.Object ref = ipAddr_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        ipAddr_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required string hostName = 2;
    public static final int HOSTNAME_FIELD_NUMBER = 2;
    private java.lang.Object hostName_;
    public boolean hasHostName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public String getHostName() {
      java.lang.Object ref = hostName_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          hostName_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getHostNameBytes() {
      java.lang.Object ref = hostName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        hostName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required string storageID = 3;
    public static final int STORAGEID_FIELD_NUMBER = 3;
    private java.lang.Object storageID_;
    public boolean hasStorageID() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public String getStorageID() {
      java.lang.Object ref = storageID_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          storageID_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getStorageIDBytes() {
      java.lang.Object ref = storageID_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        storageID_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required uint32 xferPort = 4;
    public static final int XFERPORT_FIELD_NUMBER = 4;
    private int xferPort_;
    public boolean hasXferPort() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public int getXferPort() {
      return xferPort_;
    }
    
    // required uint32 infoPort = 5;
    public static final int INFOPORT_FIELD_NUMBER = 5;
    private int infoPort_;
    public boolean hasInfoPort() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public int getInfoPort() {
      return infoPort_;
    }
    
    // required uint32 ipcPort = 6;
    public static final int IPCPORT_FIELD_NUMBER = 6;
    private int ipcPort_;
    public boolean hasIpcPort() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    public int getIpcPort() {
      return ipcPort_;
    }
    
    private void initFields() {
      ipAddr_ = "";
      hostName_ = "";
      storageID_ = "";
      xferPort_ = 0;
      infoPort_ = 0;
      ipcPort_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasIpAddr()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasHostName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStorageID()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasXferPort()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasInfoPort()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasIpcPort()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getIpAddrBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getHostNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getStorageIDBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt32(4, xferPort_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt32(5, infoPort_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt32(6, ipcPort_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getIpAddrBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getHostNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getStorageIDBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(4, xferPort_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(5, infoPort_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(6, ipcPort_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto) obj;
      
      boolean result = true;
      result = result && (hasIpAddr() == other.hasIpAddr());
      if (hasIpAddr()) {
        result = result && getIpAddr()
            .equals(other.getIpAddr());
      }
      result = result && (hasHostName() == other.hasHostName());
      if (hasHostName()) {
        result = result && getHostName()
            .equals(other.getHostName());
      }
      result = result && (hasStorageID() == other.hasStorageID());
      if (hasStorageID()) {
        result = result && getStorageID()
            .equals(other.getStorageID());
      }
      result = result && (hasXferPort() == other.hasXferPort());
      if (hasXferPort()) {
        result = result && (getXferPort()
            == other.getXferPort());
      }
      result = result && (hasInfoPort() == other.hasInfoPort());
      if (hasInfoPort()) {
        result = result && (getInfoPort()
            == other.getInfoPort());
      }
      result = result && (hasIpcPort() == other.hasIpcPort());
      if (hasIpcPort()) {
        result = result && (getIpcPort()
            == other.getIpcPort());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasIpAddr()) {
        hash = (37 * hash) + IPADDR_FIELD_NUMBER;
        hash = (53 * hash) + getIpAddr().hashCode();
      }
      if (hasHostName()) {
        hash = (37 * hash) + HOSTNAME_FIELD_NUMBER;
        hash = (53 * hash) + getHostName().hashCode();
      }
      if (hasStorageID()) {
        hash = (37 * hash) + STORAGEID_FIELD_NUMBER;
        hash = (53 * hash) + getStorageID().hashCode();
      }
      if (hasXferPort()) {
        hash = (37 * hash) + XFERPORT_FIELD_NUMBER;
        hash = (53 * hash) + getXferPort();
      }
      if (hasInfoPort()) {
        hash = (37 * hash) + INFOPORT_FIELD_NUMBER;
        hash = (53 * hash) + getInfoPort();
      }
      if (hasIpcPort()) {
        hash = (37 * hash) + IPCPORT_FIELD_NUMBER;
        hash = (53 * hash) + getIpcPort();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        ipAddr_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        hostName_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        storageID_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        xferPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        infoPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000010);
        ipcPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.ipAddr_ = ipAddr_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.hostName_ = hostName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.storageID_ = storageID_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.xferPort_ = xferPort_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.infoPort_ = infoPort_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.ipcPort_ = ipcPort_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance()) return this;
        if (other.hasIpAddr()) {
          setIpAddr(other.getIpAddr());
        }
        if (other.hasHostName()) {
          setHostName(other.getHostName());
        }
        if (other.hasStorageID()) {
          setStorageID(other.getStorageID());
        }
        if (other.hasXferPort()) {
          setXferPort(other.getXferPort());
        }
        if (other.hasInfoPort()) {
          setInfoPort(other.getInfoPort());
        }
        if (other.hasIpcPort()) {
          setIpcPort(other.getIpcPort());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasIpAddr()) {
          
          return false;
        }
        if (!hasHostName()) {
          
          return false;
        }
        if (!hasStorageID()) {
          
          return false;
        }
        if (!hasXferPort()) {
          
          return false;
        }
        if (!hasInfoPort()) {
          
          return false;
        }
        if (!hasIpcPort()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              ipAddr_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              hostName_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              storageID_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              xferPort_ = input.readUInt32();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              infoPort_ = input.readUInt32();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              ipcPort_ = input.readUInt32();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required string ipAddr = 1;
      private java.lang.Object ipAddr_ = "";
      public boolean hasIpAddr() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getIpAddr() {
        java.lang.Object ref = ipAddr_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          ipAddr_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setIpAddr(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        ipAddr_ = value;
        onChanged();
        return this;
      }
      public Builder clearIpAddr() {
        bitField0_ = (bitField0_ & ~0x00000001);
        ipAddr_ = getDefaultInstance().getIpAddr();
        onChanged();
        return this;
      }
      void setIpAddr(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        ipAddr_ = value;
        onChanged();
      }
      
      // required string hostName = 2;
      private java.lang.Object hostName_ = "";
      public boolean hasHostName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public String getHostName() {
        java.lang.Object ref = hostName_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          hostName_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setHostName(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        hostName_ = value;
        onChanged();
        return this;
      }
      public Builder clearHostName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        hostName_ = getDefaultInstance().getHostName();
        onChanged();
        return this;
      }
      void setHostName(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        hostName_ = value;
        onChanged();
      }
      
      // required string storageID = 3;
      private java.lang.Object storageID_ = "";
      public boolean hasStorageID() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public String getStorageID() {
        java.lang.Object ref = storageID_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          storageID_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setStorageID(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        storageID_ = value;
        onChanged();
        return this;
      }
      public Builder clearStorageID() {
        bitField0_ = (bitField0_ & ~0x00000004);
        storageID_ = getDefaultInstance().getStorageID();
        onChanged();
        return this;
      }
      void setStorageID(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000004;
        storageID_ = value;
        onChanged();
      }
      
      // required uint32 xferPort = 4;
      private int xferPort_ ;
      public boolean hasXferPort() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public int getXferPort() {
        return xferPort_;
      }
      public Builder setXferPort(int value) {
        bitField0_ |= 0x00000008;
        xferPort_ = value;
        onChanged();
        return this;
      }
      public Builder clearXferPort() {
        bitField0_ = (bitField0_ & ~0x00000008);
        xferPort_ = 0;
        onChanged();
        return this;
      }
      
      // required uint32 infoPort = 5;
      private int infoPort_ ;
      public boolean hasInfoPort() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public int getInfoPort() {
        return infoPort_;
      }
      public Builder setInfoPort(int value) {
        bitField0_ |= 0x00000010;
        infoPort_ = value;
        onChanged();
        return this;
      }
      public Builder clearInfoPort() {
        bitField0_ = (bitField0_ & ~0x00000010);
        infoPort_ = 0;
        onChanged();
        return this;
      }
      
      // required uint32 ipcPort = 6;
      private int ipcPort_ ;
      public boolean hasIpcPort() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      public int getIpcPort() {
        return ipcPort_;
      }
      public Builder setIpcPort(int value) {
        bitField0_ |= 0x00000020;
        ipcPort_ = value;
        onChanged();
        return this;
      }
      public Builder clearIpcPort() {
        bitField0_ = (bitField0_ & ~0x00000020);
        ipcPort_ = 0;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:DatanodeIDProto)
    }
    
    static {
      defaultInstance = new DatanodeIDProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:DatanodeIDProto)
  }
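
  // Illustrative usage sketch -- not part of the protoc-generated output.
  // It shows how the DatanodeIDProto builder and parseFrom(...) methods above
  // are typically driven; every field value below is hypothetical.
  private static DatanodeIDProto exampleDatanodeIDRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    DatanodeIDProto id = DatanodeIDProto.newBuilder()
        .setIpAddr("10.0.0.1")            // required string ipAddr = 1
        .setHostName("dn1.example.com")   // required string hostName = 2
        .setStorageID("DS-1")             // required string storageID = 3
        .setXferPort(50010)               // required uint32 xferPort = 4
        .setInfoPort(50075)               // required uint32 infoPort = 5
        .setIpcPort(50020)                // required uint32 ipcPort = 6
        .build();                         // throws if a required field is missing
    // Round-trip through the wire format using the generated parser entry point.
    byte[] bytes = id.toByteArray();
    return DatanodeIDProto.parseFrom(bytes);
  }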
  
  public interface DatanodeInfosProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated .DatanodeInfoProto datanodes = 1;
    java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> 
        getDatanodesList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getDatanodes(int index);
    int getDatanodesCount();
    java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder> 
        getDatanodesOrBuilderList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder getDatanodesOrBuilder(
        int index);
  }
  public static final class DatanodeInfosProto extends
      com.google.protobuf.GeneratedMessage
      implements DatanodeInfosProtoOrBuilder {
    // Use DatanodeInfosProto.newBuilder() to construct.
    private DatanodeInfosProto(Builder builder) {
      super(builder);
    }
    private DatanodeInfosProto(boolean noInit) {}
    
    private static final DatanodeInfosProto defaultInstance;
    public static DatanodeInfosProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public DatanodeInfosProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfosProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfosProto_fieldAccessorTable;
    }
    
    // repeated .DatanodeInfoProto datanodes = 1;
    public static final int DATANODES_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> datanodes_;
    public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> getDatanodesList() {
      return datanodes_;
    }
    public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder> 
        getDatanodesOrBuilderList() {
      return datanodes_;
    }
    public int getDatanodesCount() {
      return datanodes_.size();
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getDatanodes(int index) {
      return datanodes_.get(index);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder getDatanodesOrBuilder(
        int index) {
      return datanodes_.get(index);
    }
    
    private void initFields() {
      datanodes_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      for (int i = 0; i < getDatanodesCount(); i++) {
        if (!getDatanodes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < datanodes_.size(); i++) {
        output.writeMessage(1, datanodes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      for (int i = 0; i < datanodes_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, datanodes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto) obj;
      
      boolean result = true;
      result = result && getDatanodesList()
          .equals(other.getDatanodesList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getDatanodesCount() > 0) {
        hash = (37 * hash) + DATANODES_FIELD_NUMBER;
        hash = (53 * hash) + getDatanodesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfosProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfosProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getDatanodesFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (datanodesBuilder_ == null) {
          datanodes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          datanodesBuilder_.clear();
        }
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto(this);
        int from_bitField0_ = bitField0_;
        if (datanodesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            datanodes_ = java.util.Collections.unmodifiableList(datanodes_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.datanodes_ = datanodes_;
        } else {
          result.datanodes_ = datanodesBuilder_.build();
        }
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.getDefaultInstance()) return this;
        if (datanodesBuilder_ == null) {
          if (!other.datanodes_.isEmpty()) {
            if (datanodes_.isEmpty()) {
              datanodes_ = other.datanodes_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureDatanodesIsMutable();
              datanodes_.addAll(other.datanodes_);
            }
            onChanged();
          }
        } else {
          if (!other.datanodes_.isEmpty()) {
            if (datanodesBuilder_.isEmpty()) {
              datanodesBuilder_.dispose();
              datanodesBuilder_ = null;
              datanodes_ = other.datanodes_;
              bitField0_ = (bitField0_ & ~0x00000001);
              datanodesBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getDatanodesFieldBuilder() : null;
            } else {
              datanodesBuilder_.addAllMessages(other.datanodes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        for (int i = 0; i < getDatanodesCount(); i++) {
          if (!getDatanodes(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addDatanodes(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // repeated .DatanodeInfoProto datanodes = 1;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> datanodes_ =
        java.util.Collections.emptyList();
      private void ensureDatanodesIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          datanodes_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto>(datanodes_);
          bitField0_ |= 0x00000001;
         }
      }
      
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder> datanodesBuilder_;
      
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> getDatanodesList() {
        if (datanodesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(datanodes_);
        } else {
          return datanodesBuilder_.getMessageList();
        }
      }
      public int getDatanodesCount() {
        if (datanodesBuilder_ == null) {
          return datanodes_.size();
        } else {
          return datanodesBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getDatanodes(int index) {
        if (datanodesBuilder_ == null) {
          return datanodes_.get(index);
        } else {
          return datanodesBuilder_.getMessage(index);
        }
      }
      public Builder setDatanodes(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto value) {
        if (datanodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDatanodesIsMutable();
          datanodes_.set(index, value);
          onChanged();
        } else {
          datanodesBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setDatanodes(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder builderForValue) {
        if (datanodesBuilder_ == null) {
          ensureDatanodesIsMutable();
          datanodes_.set(index, builderForValue.build());
          onChanged();
        } else {
          datanodesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addDatanodes(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto value) {
        if (datanodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDatanodesIsMutable();
          datanodes_.add(value);
          onChanged();
        } else {
          datanodesBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addDatanodes(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto value) {
        if (datanodesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDatanodesIsMutable();
          datanodes_.add(index, value);
          onChanged();
        } else {
          datanodesBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addDatanodes(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder builderForValue) {
        if (datanodesBuilder_ == null) {
          ensureDatanodesIsMutable();
          datanodes_.add(builderForValue.build());
          onChanged();
        } else {
          datanodesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addDatanodes(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder builderForValue) {
        if (datanodesBuilder_ == null) {
          ensureDatanodesIsMutable();
          datanodes_.add(index, builderForValue.build());
          onChanged();
        } else {
          datanodesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllDatanodes(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> values) {
        if (datanodesBuilder_ == null) {
          ensureDatanodesIsMutable();
          super.addAll(values, datanodes_);
          onChanged();
        } else {
          datanodesBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearDatanodes() {
        if (datanodesBuilder_ == null) {
          datanodes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          datanodesBuilder_.clear();
        }
        return this;
      }
      public Builder removeDatanodes(int index) {
        if (datanodesBuilder_ == null) {
          ensureDatanodesIsMutable();
          datanodes_.remove(index);
          onChanged();
        } else {
          datanodesBuilder_.remove(index);
        }
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder getDatanodesBuilder(
          int index) {
        return getDatanodesFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder getDatanodesOrBuilder(
          int index) {
        if (datanodesBuilder_ == null) {
          return datanodes_.get(index);
        } else {
          return datanodesBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder> 
           getDatanodesOrBuilderList() {
        if (datanodesBuilder_ != null) {
          return datanodesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(datanodes_);
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder addDatanodesBuilder() {
        return getDatanodesFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance());
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder addDatanodesBuilder(
          int index) {
        return getDatanodesFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder> 
           getDatanodesBuilderList() {
        return getDatanodesFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder> 
          getDatanodesFieldBuilder() {
        if (datanodesBuilder_ == null) {
          datanodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder>(
                  datanodes_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          datanodes_ = null;
        }
        return datanodesBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:DatanodeInfosProto)
    }
    
    static {
      defaultInstance = new DatanodeInfosProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:DatanodeInfosProto)
  }
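
  // Illustrative usage sketch -- not part of the protoc-generated output.
  // It exercises the repeated-field API of DatanodeInfosProto above, assuming
  // the standard generated setters of DatanodeInfoProto.Builder (defined
  // further down in this file); the field values are hypothetical.
  private static DatanodeInfosProto exampleDatanodeInfos(DatanodeIDProto id) {
    DatanodeInfoProto info = DatanodeInfoProto.newBuilder()
        .setId(id)                                            // required .DatanodeIDProto id = 1
        .setCapacity(1024L * 1024L * 1024L)                   // optional uint64 capacity = 2
        .setAdminState(DatanodeInfoProto.AdminState.NORMAL)   // optional enum, field 10
        .build();
    // Each element of the repeated field is appended individually; the built
    // message exposes the result through getDatanodesList()/getDatanodesCount().
    return DatanodeInfosProto.newBuilder()
        .addDatanodes(info)
        .build();
  }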
  
  public interface DatanodeInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .DatanodeIDProto id = 1;
    boolean hasId();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder();
    
    // optional uint64 capacity = 2 [default = 0];
    boolean hasCapacity();
    long getCapacity();
    
    // optional uint64 dfsUsed = 3 [default = 0];
    boolean hasDfsUsed();
    long getDfsUsed();
    
    // optional uint64 remaining = 4 [default = 0];
    boolean hasRemaining();
    long getRemaining();
    
    // optional uint64 blockPoolUsed = 5 [default = 0];
    boolean hasBlockPoolUsed();
    long getBlockPoolUsed();
    
    // optional uint64 lastUpdate = 6 [default = 0];
    boolean hasLastUpdate();
    long getLastUpdate();
    
    // optional uint32 xceiverCount = 7 [default = 0];
    boolean hasXceiverCount();
    int getXceiverCount();
    
    // optional string location = 8;
    boolean hasLocation();
    String getLocation();
    
    // optional .DatanodeInfoProto.AdminState adminState = 10 [default = NORMAL];
    boolean hasAdminState();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState();
  }
  public static final class DatanodeInfoProto extends
      com.google.protobuf.GeneratedMessage
      implements DatanodeInfoProtoOrBuilder {
    // Use DatanodeInfoProto.newBuilder() to construct.
    private DatanodeInfoProto(Builder builder) {
      super(builder);
    }
    private DatanodeInfoProto(boolean noInit) {}
    
    private static final DatanodeInfoProto defaultInstance;
    public static DatanodeInfoProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public DatanodeInfoProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_fieldAccessorTable;
    }
    
    public enum AdminState
        implements com.google.protobuf.ProtocolMessageEnum {
      NORMAL(0, 0),
      DECOMMISSION_INPROGRESS(1, 1),
      DECOMMISSIONED(2, 2),
      ;
      
      public static final int NORMAL_VALUE = 0;
      public static final int DECOMMISSION_INPROGRESS_VALUE = 1;
      public static final int DECOMMISSIONED_VALUE = 2;
      
      
      public final int getNumber() { return value; }
      
      public static AdminState valueOf(int value) {
        switch (value) {
          case 0: return NORMAL;
          case 1: return DECOMMISSION_INPROGRESS;
          case 2: return DECOMMISSIONED;
          default: return null;
        }
      }
      
      public static com.google.protobuf.Internal.EnumLiteMap<AdminState>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<AdminState>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<AdminState>() {
              public AdminState findValueByNumber(int number) {
                return AdminState.valueOf(number);
              }
            };
      
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDescriptor().getEnumTypes().get(0);
      }
      
      private static final AdminState[] VALUES = {
        NORMAL, DECOMMISSION_INPROGRESS, DECOMMISSIONED, 
      };
      
      public static AdminState valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }
      
      private final int index;
      private final int value;
      
      private AdminState(int index, int value) {
        this.index = index;
        this.value = value;
      }
      
      // @@protoc_insertion_point(enum_scope:DatanodeInfoProto.AdminState)
    }
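
    // Illustrative usage sketch -- not part of the protoc-generated output.
    // AdminState.valueOf(int) above returns null for unknown wire numbers, so
    // a caller usually falls back to the declared default of NORMAL.
    private static AdminState exampleAdminStateLookup(int wireNumber) {
      AdminState state = AdminState.valueOf(wireNumber);
      return state != null ? state : AdminState.NORMAL;
    }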
    
    private int bitField0_;
    // required .DatanodeIDProto id = 1;
    public static final int ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto id_;
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId() {
      return id_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder() {
      return id_;
    }
    
    // optional uint64 capacity = 2 [default = 0];
    public static final int CAPACITY_FIELD_NUMBER = 2;
    private long capacity_;
    public boolean hasCapacity() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getCapacity() {
      return capacity_;
    }
    
    // optional uint64 dfsUsed = 3 [default = 0];
    public static final int DFSUSED_FIELD_NUMBER = 3;
    private long dfsUsed_;
    public boolean hasDfsUsed() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getDfsUsed() {
      return dfsUsed_;
    }
    
    // optional uint64 remaining = 4 [default = 0];
    public static final int REMAINING_FIELD_NUMBER = 4;
    private long remaining_;
    public boolean hasRemaining() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public long getRemaining() {
      return remaining_;
    }
    
    // optional uint64 blockPoolUsed = 5 [default = 0];
    public static final int BLOCKPOOLUSED_FIELD_NUMBER = 5;
    private long blockPoolUsed_;
    public boolean hasBlockPoolUsed() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public long getBlockPoolUsed() {
      return blockPoolUsed_;
    }
    
    // optional uint64 lastUpdate = 6 [default = 0];
    public static final int LASTUPDATE_FIELD_NUMBER = 6;
    private long lastUpdate_;
    public boolean hasLastUpdate() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    public long getLastUpdate() {
      return lastUpdate_;
    }
    
    // optional uint32 xceiverCount = 7 [default = 0];
    public static final int XCEIVERCOUNT_FIELD_NUMBER = 7;
    private int xceiverCount_;
    public boolean hasXceiverCount() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    public int getXceiverCount() {
      return xceiverCount_;
    }
    
    // optional string location = 8;
    public static final int LOCATION_FIELD_NUMBER = 8;
    private java.lang.Object location_;
    public boolean hasLocation() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    public String getLocation() {
      java.lang.Object ref = location_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          location_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getLocationBytes() {
      java.lang.Object ref = location_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        location_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // optional .DatanodeInfoProto.AdminState adminState = 10 [default = NORMAL];
    public static final int ADMINSTATE_FIELD_NUMBER = 10;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState adminState_;
    public boolean hasAdminState() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState() {
      return adminState_;
    }
    
    private void initFields() {
      id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
      capacity_ = 0L;
      dfsUsed_ = 0L;
      remaining_ = 0L;
      blockPoolUsed_ = 0L;
      lastUpdate_ = 0L;
      xceiverCount_ = 0;
      location_ = "";
      adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getId().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, id_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, capacity_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, dfsUsed_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, remaining_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, blockPoolUsed_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt64(6, lastUpdate_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt32(7, xceiverCount_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeBytes(8, getLocationBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeEnum(10, adminState_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, id_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, capacity_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, dfsUsed_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, remaining_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, blockPoolUsed_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(6, lastUpdate_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(7, xceiverCount_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(8, getLocationBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(10, adminState_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto) obj;
      
      boolean result = true;
      result = result && (hasId() == other.hasId());
      if (hasId()) {
        result = result && getId()
            .equals(other.getId());
      }
      result = result && (hasCapacity() == other.hasCapacity());
      if (hasCapacity()) {
        result = result && (getCapacity()
            == other.getCapacity());
      }
      result = result && (hasDfsUsed() == other.hasDfsUsed());
      if (hasDfsUsed()) {
        result = result && (getDfsUsed()
            == other.getDfsUsed());
      }
      result = result && (hasRemaining() == other.hasRemaining());
      if (hasRemaining()) {
        result = result && (getRemaining()
            == other.getRemaining());
      }
      result = result && (hasBlockPoolUsed() == other.hasBlockPoolUsed());
      if (hasBlockPoolUsed()) {
        result = result && (getBlockPoolUsed()
            == other.getBlockPoolUsed());
      }
      result = result && (hasLastUpdate() == other.hasLastUpdate());
      if (hasLastUpdate()) {
        result = result && (getLastUpdate()
            == other.getLastUpdate());
      }
      result = result && (hasXceiverCount() == other.hasXceiverCount());
      if (hasXceiverCount()) {
        result = result && (getXceiverCount()
            == other.getXceiverCount());
      }
      result = result && (hasLocation() == other.hasLocation());
      if (hasLocation()) {
        result = result && getLocation()
            .equals(other.getLocation());
      }
      result = result && (hasAdminState() == other.hasAdminState());
      if (hasAdminState()) {
        result = result &&
            (getAdminState() == other.getAdminState());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId().hashCode();
      }
      if (hasCapacity()) {
        hash = (37 * hash) + CAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCapacity());
      }
      if (hasDfsUsed()) {
        hash = (37 * hash) + DFSUSED_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getDfsUsed());
      }
      if (hasRemaining()) {
        hash = (37 * hash) + REMAINING_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getRemaining());
      }
      if (hasBlockPoolUsed()) {
        hash = (37 * hash) + BLOCKPOOLUSED_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBlockPoolUsed());
      }
      if (hasLastUpdate()) {
        hash = (37 * hash) + LASTUPDATE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLastUpdate());
      }
      if (hasXceiverCount()) {
        hash = (37 * hash) + XCEIVERCOUNT_FIELD_NUMBER;
        hash = (53 * hash) + getXceiverCount();
      }
      if (hasLocation()) {
        hash = (37 * hash) + LOCATION_FIELD_NUMBER;
        hash = (53 * hash) + getLocation().hashCode();
      }
      if (hasAdminState()) {
        hash = (37 * hash) + ADMINSTATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getAdminState());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
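    
    // Note (editorial, not part of the generated output): the parseFrom overloads
    // above expect the input to contain exactly one message, while the
    // parseDelimitedFrom overloads first read a varint length prefix, so several
    // messages can be written back to back on one stream; they return null when
    // the stream is already at end-of-file.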
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (idBuilder_ == null) {
          id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
        } else {
          idBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        capacity_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        dfsUsed_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        remaining_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        blockPoolUsed_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        lastUpdate_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        xceiverCount_ = 0;
        bitField0_ = (bitField0_ & ~0x00000040);
        location_ = "";
        bitField0_ = (bitField0_ & ~0x00000080);
        adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (idBuilder_ == null) {
          result.id_ = id_;
        } else {
          result.id_ = idBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.capacity_ = capacity_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.dfsUsed_ = dfsUsed_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.remaining_ = remaining_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.blockPoolUsed_ = blockPoolUsed_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.lastUpdate_ = lastUpdate_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.xceiverCount_ = xceiverCount_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.location_ = location_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        result.adminState_ = adminState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          mergeId(other.getId());
        }
        if (other.hasCapacity()) {
          setCapacity(other.getCapacity());
        }
        if (other.hasDfsUsed()) {
          setDfsUsed(other.getDfsUsed());
        }
        if (other.hasRemaining()) {
          setRemaining(other.getRemaining());
        }
        if (other.hasBlockPoolUsed()) {
          setBlockPoolUsed(other.getBlockPoolUsed());
        }
        if (other.hasLastUpdate()) {
          setLastUpdate(other.getLastUpdate());
        }
        if (other.hasXceiverCount()) {
          setXceiverCount(other.getXceiverCount());
        }
        if (other.hasLocation()) {
          setLocation(other.getLocation());
        }
        if (other.hasAdminState()) {
          setAdminState(other.getAdminState());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasId()) {
          
          return false;
        }
        if (!getId().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.newBuilder();
              if (hasId()) {
                subBuilder.mergeFrom(getId());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setId(subBuilder.buildPartial());
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              capacity_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              dfsUsed_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              remaining_ = input.readUInt64();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              blockPoolUsed_ = input.readUInt64();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              lastUpdate_ = input.readUInt64();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              xceiverCount_ = input.readUInt32();
              break;
            }
            case 66: {
              bitField0_ |= 0x00000080;
              location_ = input.readBytes();
              break;
            }
            case 80: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState value = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(10, rawValue);
              } else {
                bitField0_ |= 0x00000100;
                adminState_ = value;
              }
              break;
            }
          }
        }
      }
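      
      // Note (editorial, not part of the generated output): the numeric case labels
      // in mergeFrom above are protobuf wire tags, computed as
      // (fieldNumber << 3) | wireType. For example:
      //   case 10 == (1 << 3)  | 2  -> field 1 (id), length-delimited message
      //   case 16 == (2 << 3)  | 0  -> field 2 (capacity), varint
      //   case 66 == (8 << 3)  | 2  -> field 8 (location), length-delimited string
      //   case 80 == (10 << 3) | 0  -> field 10 (adminState), varint enum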
      
      private int bitField0_;
      
      // required .DatanodeIDProto id = 1;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder> idBuilder_;
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId() {
        if (idBuilder_ == null) {
          return id_;
        } else {
          return idBuilder_.getMessage();
        }
      }
      public Builder setId(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto value) {
        if (idBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          id_ = value;
          onChanged();
        } else {
          idBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder setId(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder builderForValue) {
        if (idBuilder_ == null) {
          id_ = builderForValue.build();
          onChanged();
        } else {
          idBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder mergeId(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto value) {
        if (idBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              id_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance()) {
            id_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.newBuilder(id_).mergeFrom(value).buildPartial();
          } else {
            id_ = value;
          }
          onChanged();
        } else {
          idBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearId() {
        if (idBuilder_ == null) {
          id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
          onChanged();
        } else {
          idBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder getIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getIdFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder() {
        if (idBuilder_ != null) {
          return idBuilder_.getMessageOrBuilder();
        } else {
          return id_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder> 
          getIdFieldBuilder() {
        if (idBuilder_ == null) {
          idBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder>(
                  id_,
                  getParentForChildren(),
                  isClean());
          id_ = null;
        }
        return idBuilder_;
      }
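      
      // Note (editorial, not part of the generated output): the id field starts out
      // backed by the plain id_ message; the first call to getIdBuilder() creates a
      // SingleFieldBuilder via getIdFieldBuilder(), after which reads and writes go
      // through idBuilder_ so edits made on the nested builder propagate change
      // notifications to this parent builder.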
      
      // optional uint64 capacity = 2 [default = 0];
      private long capacity_ ;
      public boolean hasCapacity() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getCapacity() {
        return capacity_;
      }
      public Builder setCapacity(long value) {
        bitField0_ |= 0x00000002;
        capacity_ = value;
        onChanged();
        return this;
      }
      public Builder clearCapacity() {
        bitField0_ = (bitField0_ & ~0x00000002);
        capacity_ = 0L;
        onChanged();
        return this;
      }
      
      // optional uint64 dfsUsed = 3 [default = 0];
      private long dfsUsed_ ;
      public boolean hasDfsUsed() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getDfsUsed() {
        return dfsUsed_;
      }
      public Builder setDfsUsed(long value) {
        bitField0_ |= 0x00000004;
        dfsUsed_ = value;
        onChanged();
        return this;
      }
      public Builder clearDfsUsed() {
        bitField0_ = (bitField0_ & ~0x00000004);
        dfsUsed_ = 0L;
        onChanged();
        return this;
      }
      
      // optional uint64 remaining = 4 [default = 0];
      private long remaining_ ;
      public boolean hasRemaining() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public long getRemaining() {
        return remaining_;
      }
      public Builder setRemaining(long value) {
        bitField0_ |= 0x00000008;
        remaining_ = value;
        onChanged();
        return this;
      }
      public Builder clearRemaining() {
        bitField0_ = (bitField0_ & ~0x00000008);
        remaining_ = 0L;
        onChanged();
        return this;
      }
      
      // optional uint64 blockPoolUsed = 5 [default = 0];
      private long blockPoolUsed_ ;
      public boolean hasBlockPoolUsed() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public long getBlockPoolUsed() {
        return blockPoolUsed_;
      }
      public Builder setBlockPoolUsed(long value) {
        bitField0_ |= 0x00000010;
        blockPoolUsed_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockPoolUsed() {
        bitField0_ = (bitField0_ & ~0x00000010);
        blockPoolUsed_ = 0L;
        onChanged();
        return this;
      }
      
      // optional uint64 lastUpdate = 6 [default = 0];
      private long lastUpdate_ ;
      public boolean hasLastUpdate() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      public long getLastUpdate() {
        return lastUpdate_;
      }
      public Builder setLastUpdate(long value) {
        bitField0_ |= 0x00000020;
        lastUpdate_ = value;
        onChanged();
        return this;
      }
      public Builder clearLastUpdate() {
        bitField0_ = (bitField0_ & ~0x00000020);
        lastUpdate_ = 0L;
        onChanged();
        return this;
      }
      
      // optional uint32 xceiverCount = 7 [default = 0];
      private int xceiverCount_ ;
      public boolean hasXceiverCount() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      public int getXceiverCount() {
        return xceiverCount_;
      }
      public Builder setXceiverCount(int value) {
        bitField0_ |= 0x00000040;
        xceiverCount_ = value;
        onChanged();
        return this;
      }
      public Builder clearXceiverCount() {
        bitField0_ = (bitField0_ & ~0x00000040);
        xceiverCount_ = 0;
        onChanged();
        return this;
      }
      
      // optional string location = 8;
      private java.lang.Object location_ = "";
      public boolean hasLocation() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      public String getLocation() {
        java.lang.Object ref = location_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          location_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setLocation(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000080;
        location_ = value;
        onChanged();
        return this;
      }
      public Builder clearLocation() {
        bitField0_ = (bitField0_ & ~0x00000080);
        location_ = getDefaultInstance().getLocation();
        onChanged();
        return this;
      }
      void setLocation(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000080;
        location_ = value;
        onChanged();
      }
      
      // optional .DatanodeInfoProto.AdminState adminState = 10 [default = NORMAL];
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
      public boolean hasAdminState() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState() {
        return adminState_;
      }
      public Builder setAdminState(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000100;
        adminState_ = value;
        onChanged();
        return this;
      }
      public Builder clearAdminState() {
        bitField0_ = (bitField0_ & ~0x00000100);
        adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:DatanodeInfoProto)
    }
    
    static {
      defaultInstance = new DatanodeInfoProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:DatanodeInfoProto)
  }
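  
  // Illustrative sketch (editorial, not part of the generated file): one way to
  // populate a DatanodeInfoProto using the builder above. The method name and the
  // sample values are hypothetical; the required id field is passed in because
  // DatanodeIDProto (defined earlier in this file) has required fields of its own
  // that must already be set for build() to succeed.
  private static DatanodeInfoProto describeDatanode(DatanodeIDProto id) {
    DatanodeInfoProto info = DatanodeInfoProto.newBuilder()
        .setId(id)
        .setCapacity(4L * 1024 * 1024 * 1024)       // raw capacity in bytes
        .setDfsUsed(1L * 1024 * 1024 * 1024)
        .setRemaining(3L * 1024 * 1024 * 1024)
        .setLocation("/default-rack")               // network topology path
        .setAdminState(DatanodeInfoProto.AdminState.NORMAL)
        .build();                                   // throws if a required field is missing
    // getSerializedSize() is memoized and matches the byte[] produced by toByteArray().
    assert info.getSerializedSize() == info.toByteArray().length;
    return info;
  }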
  
  public interface ContentSummaryProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint64 length = 1;
    boolean hasLength();
    long getLength();
    
    // required uint64 fileCount = 2;
    boolean hasFileCount();
    long getFileCount();
    
    // required uint64 directoryCount = 3;
    boolean hasDirectoryCount();
    long getDirectoryCount();
    
    // required uint64 quota = 4;
    boolean hasQuota();
    long getQuota();
    
    // required uint64 spaceConsumed = 5;
    boolean hasSpaceConsumed();
    long getSpaceConsumed();
    
    // required uint64 spaceQuota = 6;
    boolean hasSpaceQuota();
    long getSpaceQuota();
  }
  public static final class ContentSummaryProto extends
      com.google.protobuf.GeneratedMessage
      implements ContentSummaryProtoOrBuilder {
    // Use ContentSummaryProto.newBuilder() to construct.
    private ContentSummaryProto(Builder builder) {
      super(builder);
    }
    private ContentSummaryProto(boolean noInit) {}
    
    private static final ContentSummaryProto defaultInstance;
    public static ContentSummaryProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public ContentSummaryProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ContentSummaryProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ContentSummaryProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint64 length = 1;
    public static final int LENGTH_FIELD_NUMBER = 1;
    private long length_;
    public boolean hasLength() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getLength() {
      return length_;
    }
    
    // required uint64 fileCount = 2;
    public static final int FILECOUNT_FIELD_NUMBER = 2;
    private long fileCount_;
    public boolean hasFileCount() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getFileCount() {
      return fileCount_;
    }
    
    // required uint64 directoryCount = 3;
    public static final int DIRECTORYCOUNT_FIELD_NUMBER = 3;
    private long directoryCount_;
    public boolean hasDirectoryCount() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getDirectoryCount() {
      return directoryCount_;
    }
    
    // required uint64 quota = 4;
    public static final int QUOTA_FIELD_NUMBER = 4;
    private long quota_;
    public boolean hasQuota() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public long getQuota() {
      return quota_;
    }
    
    // required uint64 spaceConsumed = 5;
    public static final int SPACECONSUMED_FIELD_NUMBER = 5;
    private long spaceConsumed_;
    public boolean hasSpaceConsumed() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public long getSpaceConsumed() {
      return spaceConsumed_;
    }
    
    // required uint64 spaceQuota = 6;
    public static final int SPACEQUOTA_FIELD_NUMBER = 6;
    private long spaceQuota_;
    public boolean hasSpaceQuota() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    public long getSpaceQuota() {
      return spaceQuota_;
    }
    
    private void initFields() {
      length_ = 0L;
      fileCount_ = 0L;
      directoryCount_ = 0L;
      quota_ = 0L;
      spaceConsumed_ = 0L;
      spaceQuota_ = 0L;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasLength()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFileCount()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasDirectoryCount()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasQuota()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasSpaceConsumed()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasSpaceQuota()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, length_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, fileCount_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, directoryCount_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, quota_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, spaceConsumed_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt64(6, spaceQuota_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, length_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, fileCount_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, directoryCount_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, quota_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, spaceConsumed_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(6, spaceQuota_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto) obj;
      
      boolean result = true;
      result = result && (hasLength() == other.hasLength());
      if (hasLength()) {
        result = result && (getLength()
            == other.getLength());
      }
      result = result && (hasFileCount() == other.hasFileCount());
      if (hasFileCount()) {
        result = result && (getFileCount()
            == other.getFileCount());
      }
      result = result && (hasDirectoryCount() == other.hasDirectoryCount());
      if (hasDirectoryCount()) {
        result = result && (getDirectoryCount()
            == other.getDirectoryCount());
      }
      result = result && (hasQuota() == other.hasQuota());
      if (hasQuota()) {
        result = result && (getQuota()
            == other.getQuota());
      }
      result = result && (hasSpaceConsumed() == other.hasSpaceConsumed());
      if (hasSpaceConsumed()) {
        result = result && (getSpaceConsumed()
            == other.getSpaceConsumed());
      }
      result = result && (hasSpaceQuota() == other.hasSpaceQuota());
      if (hasSpaceQuota()) {
        result = result && (getSpaceQuota()
            == other.getSpaceQuota());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLength()) {
        hash = (37 * hash) + LENGTH_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLength());
      }
      if (hasFileCount()) {
        hash = (37 * hash) + FILECOUNT_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFileCount());
      }
      if (hasDirectoryCount()) {
        hash = (37 * hash) + DIRECTORYCOUNT_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getDirectoryCount());
      }
      if (hasQuota()) {
        hash = (37 * hash) + QUOTA_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getQuota());
      }
      if (hasSpaceConsumed()) {
        hash = (37 * hash) + SPACECONSUMED_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getSpaceConsumed());
      }
      if (hasSpaceQuota()) {
        hash = (37 * hash) + SPACEQUOTA_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getSpaceQuota());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ContentSummaryProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ContentSummaryProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        length_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        fileCount_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        directoryCount_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        quota_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        spaceConsumed_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        spaceQuota_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.length_ = length_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.fileCount_ = fileCount_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.directoryCount_ = directoryCount_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.quota_ = quota_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.spaceConsumed_ = spaceConsumed_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.spaceQuota_ = spaceQuota_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto.getDefaultInstance()) return this;
        if (other.hasLength()) {
          setLength(other.getLength());
        }
        if (other.hasFileCount()) {
          setFileCount(other.getFileCount());
        }
        if (other.hasDirectoryCount()) {
          setDirectoryCount(other.getDirectoryCount());
        }
        if (other.hasQuota()) {
          setQuota(other.getQuota());
        }
        if (other.hasSpaceConsumed()) {
          setSpaceConsumed(other.getSpaceConsumed());
        }
        if (other.hasSpaceQuota()) {
          setSpaceQuota(other.getSpaceQuota());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasLength()) {
          
          return false;
        }
        if (!hasFileCount()) {
          
          return false;
        }
        if (!hasDirectoryCount()) {
          
          return false;
        }
        if (!hasQuota()) {
          
          return false;
        }
        if (!hasSpaceConsumed()) {
          
          return false;
        }
        if (!hasSpaceQuota()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              length_ = input.readUInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              fileCount_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              directoryCount_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              quota_ = input.readUInt64();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              spaceConsumed_ = input.readUInt64();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              spaceQuota_ = input.readUInt64();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint64 length = 1;
      private long length_ ;
      public boolean hasLength() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getLength() {
        return length_;
      }
      public Builder setLength(long value) {
        bitField0_ |= 0x00000001;
        length_ = value;
        onChanged();
        return this;
      }
      public Builder clearLength() {
        bitField0_ = (bitField0_ & ~0x00000001);
        length_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 fileCount = 2;
      private long fileCount_ ;
      public boolean hasFileCount() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getFileCount() {
        return fileCount_;
      }
      public Builder setFileCount(long value) {
        bitField0_ |= 0x00000002;
        fileCount_ = value;
        onChanged();
        return this;
      }
      public Builder clearFileCount() {
        bitField0_ = (bitField0_ & ~0x00000002);
        fileCount_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 directoryCount = 3;
      private long directoryCount_ ;
      public boolean hasDirectoryCount() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getDirectoryCount() {
        return directoryCount_;
      }
      public Builder setDirectoryCount(long value) {
        bitField0_ |= 0x00000004;
        directoryCount_ = value;
        onChanged();
        return this;
      }
      public Builder clearDirectoryCount() {
        bitField0_ = (bitField0_ & ~0x00000004);
        directoryCount_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 quota = 4;
      private long quota_ ;
      public boolean hasQuota() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public long getQuota() {
        return quota_;
      }
      public Builder setQuota(long value) {
        bitField0_ |= 0x00000008;
        quota_ = value;
        onChanged();
        return this;
      }
      public Builder clearQuota() {
        bitField0_ = (bitField0_ & ~0x00000008);
        quota_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 spaceConsumed = 5;
      private long spaceConsumed_ ;
      public boolean hasSpaceConsumed() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public long getSpaceConsumed() {
        return spaceConsumed_;
      }
      public Builder setSpaceConsumed(long value) {
        bitField0_ |= 0x00000010;
        spaceConsumed_ = value;
        onChanged();
        return this;
      }
      public Builder clearSpaceConsumed() {
        bitField0_ = (bitField0_ & ~0x00000010);
        spaceConsumed_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 spaceQuota = 6;
      private long spaceQuota_ ;
      public boolean hasSpaceQuota() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      public long getSpaceQuota() {
        return spaceQuota_;
      }
      public Builder setSpaceQuota(long value) {
        bitField0_ |= 0x00000020;
        spaceQuota_ = value;
        onChanged();
        return this;
      }
      public Builder clearSpaceQuota() {
        bitField0_ = (bitField0_ & ~0x00000020);
        spaceQuota_ = 0L;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:ContentSummaryProto)
    }
    
    static {
      defaultInstance = new ContentSummaryProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:ContentSummaryProto)
  }
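  
  // Illustrative sketch (editorial, not part of the generated file): a full
  // serialize/parse round trip for ContentSummaryProto. All six uint64 fields are
  // required, so every setter must be called before build(); the helper name and
  // the sample values are hypothetical.
  private static ContentSummaryProto roundTripContentSummary()
      throws com.google.protobuf.InvalidProtocolBufferException {
    ContentSummaryProto summary = ContentSummaryProto.newBuilder()
        .setLength(123456789L)
        .setFileCount(42L)
        .setDirectoryCount(7L)
        .setQuota(-1L)                      // -1 is the conventional "no quota" value in HDFS
        .setSpaceConsumed(3L * 123456789L)  // e.g. three replicas of every byte
        .setSpaceQuota(-1L)
        .build();
    byte[] wire = summary.toByteArray();    // length equals summary.getSerializedSize()
    return ContentSummaryProto.parseFrom(wire);
  }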
  
  public interface CorruptFileBlocksProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated string files = 1;
    java.util.List<String> getFilesList();
    int getFilesCount();
    String getFiles(int index);
    
    // required string cookie = 2;
    boolean hasCookie();
    String getCookie();
  }
  public static final class CorruptFileBlocksProto extends
      com.google.protobuf.GeneratedMessage
      implements CorruptFileBlocksProtoOrBuilder {
    // Use CorruptFileBlocksProto.newBuilder() to construct.
    private CorruptFileBlocksProto(Builder builder) {
      super(builder);
    }
    private CorruptFileBlocksProto(boolean noInit) {}
    
    private static final CorruptFileBlocksProto defaultInstance;
    public static CorruptFileBlocksProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public CorruptFileBlocksProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CorruptFileBlocksProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CorruptFileBlocksProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // repeated string files = 1;
    public static final int FILES_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList files_;
    public java.util.List<String>
        getFilesList() {
      return files_;
    }
    public int getFilesCount() {
      return files_.size();
    }
    public String getFiles(int index) {
      return files_.get(index);
    }
    
    // required string cookie = 2;
    public static final int COOKIE_FIELD_NUMBER = 2;
    private java.lang.Object cookie_;
    public boolean hasCookie() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getCookie() {
      java.lang.Object ref = cookie_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          cookie_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getCookieBytes() {
      java.lang.Object ref = cookie_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        cookie_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
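    // [Editorial note: cookie_ is stored as either a String or a ByteString. getCookie()
    //  decodes the UTF-8 bytes on first access and caches the decoded String only when the
    //  bytes are valid UTF-8; getCookieBytes() caches the encoded ByteString the same way.
    //  This lazy conversion avoids decoding strings that are only ever re-serialized.]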
    
    private void initFields() {
      files_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      cookie_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasCookie()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < files_.size(); i++) {
        output.writeBytes(1, files_.getByteString(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(2, getCookieBytes());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < files_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(files_.getByteString(i));
        }
        size += dataSize;
        size += 1 * getFilesList().size();
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getCookieBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
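    // [Editorial note: in the repeated-string sizing above, computeBytesSizeNoTag() accounts
    //  for each element's length prefix plus payload, while "1 * getFilesList().size()" adds
    //  the one-byte tag (field 1, wire type 2) written once per element. For example, two
    //  10-byte paths contribute 2 * (1 tag + 1 length + 10 data) = 24 bytes.]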
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto) obj;
      
      boolean result = true;
      result = result && getFilesList()
          .equals(other.getFilesList());
      result = result && (hasCookie() == other.hasCookie());
      if (hasCookie()) {
        result = result && getCookie()
            .equals(other.getCookie());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getFilesCount() > 0) {
        hash = (37 * hash) + FILES_FIELD_NUMBER;
        hash = (53 * hash) + getFilesList().hashCode();
      }
      if (hasCookie()) {
        hash = (37 * hash) + COOKIE_FIELD_NUMBER;
        hash = (53 * hash) + getCookie().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CorruptFileBlocksProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CorruptFileBlocksProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        files_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        cookie_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          files_ = new com.google.protobuf.UnmodifiableLazyStringList(
              files_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.files_ = files_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.cookie_ = cookie_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto.getDefaultInstance()) return this;
        if (!other.files_.isEmpty()) {
          if (files_.isEmpty()) {
            files_ = other.files_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureFilesIsMutable();
            files_.addAll(other.files_);
          }
          onChanged();
        }
        if (other.hasCookie()) {
          setCookie(other.getCookie());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasCookie()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              ensureFilesIsMutable();
              files_.add(input.readBytes());
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              cookie_ = input.readBytes();
              break;
            }
          }
        }
      }
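      // [Editorial note: the tag values above follow the protobuf wire format,
      //  tag = (field_number << 3) | wire_type. Tag 10 is field 1 (files) with wire type 2
      //  (length-delimited), tag 18 is field 2 (cookie), and tag 0 signals end of input.
      //  Fields with unrecognized tags are preserved via parseUnknownField().]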
      
      private int bitField0_;
      
      // repeated string files = 1;
      private com.google.protobuf.LazyStringList files_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureFilesIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          files_ = new com.google.protobuf.LazyStringArrayList(files_);
          bitField0_ |= 0x00000001;
         }
      }
      public java.util.List<String>
          getFilesList() {
        return java.util.Collections.unmodifiableList(files_);
      }
      public int getFilesCount() {
        return files_.size();
      }
      public String getFiles(int index) {
        return files_.get(index);
      }
      public Builder setFiles(
          int index, String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilesIsMutable();
        files_.set(index, value);
        onChanged();
        return this;
      }
      public Builder addFiles(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilesIsMutable();
        files_.add(value);
        onChanged();
        return this;
      }
      public Builder addAllFiles(
          java.lang.Iterable<String> values) {
        ensureFilesIsMutable();
        super.addAll(values, files_);
        onChanged();
        return this;
      }
      public Builder clearFiles() {
        files_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      void addFiles(com.google.protobuf.ByteString value) {
        ensureFilesIsMutable();
        files_.add(value);
        onChanged();
      }
      
      // required string cookie = 2;
      private java.lang.Object cookie_ = "";
      public boolean hasCookie() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public String getCookie() {
        java.lang.Object ref = cookie_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          cookie_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setCookie(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        cookie_ = value;
        onChanged();
        return this;
      }
      public Builder clearCookie() {
        bitField0_ = (bitField0_ & ~0x00000002);
        cookie_ = getDefaultInstance().getCookie();
        onChanged();
        return this;
      }
      void setCookie(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        cookie_ = value;
        onChanged();
      }
      
      // @@protoc_insertion_point(builder_scope:CorruptFileBlocksProto)
    }
    
    static {
      defaultInstance = new CorruptFileBlocksProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:CorruptFileBlocksProto)
  }
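  // [Editorial note, not part of the generated source: a round-trip sketch using only the
  //  API shown above; the file path and cookie values are illustrative.]
  //
  //   HdfsProtos.CorruptFileBlocksProto msg = HdfsProtos.CorruptFileBlocksProto.newBuilder()
  //       .addFiles("/user/example/part-00000")  // repeated string files = 1
  //       .setCookie("cursor-0")                 // required string cookie = 2
  //       .build();                              // throws if the required cookie is missing
  //   byte[] wire = msg.toByteArray();
  //   HdfsProtos.CorruptFileBlocksProto parsed = HdfsProtos.CorruptFileBlocksProto.parseFrom(wire);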
  
  public interface FsPermissionProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint32 perm = 1;
    boolean hasPerm();
    int getPerm();
  }
  public static final class FsPermissionProto extends
      com.google.protobuf.GeneratedMessage
      implements FsPermissionProtoOrBuilder {
    // Use FsPermissionProto.newBuilder() to construct.
    private FsPermissionProto(Builder builder) {
      super(builder);
    }
    private FsPermissionProto(boolean noInit) {}
    
    private static final FsPermissionProto defaultInstance;
    public static FsPermissionProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public FsPermissionProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsPermissionProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsPermissionProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint32 perm = 1;
    public static final int PERM_FIELD_NUMBER = 1;
    private int perm_;
    public boolean hasPerm() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getPerm() {
      return perm_;
    }
    
    private void initFields() {
      perm_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasPerm()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, perm_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, perm_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto) obj;
      
      boolean result = true;
      result = result && (hasPerm() == other.hasPerm());
      if (hasPerm()) {
        result = result && (getPerm()
            == other.getPerm());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPerm()) {
        hash = (37 * hash) + PERM_FIELD_NUMBER;
        hash = (53 * hash) + getPerm();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsPermissionProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsPermissionProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        perm_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.perm_ = perm_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDefaultInstance()) return this;
        if (other.hasPerm()) {
          setPerm(other.getPerm());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasPerm()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              perm_ = input.readUInt32();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint32 perm = 1;
      private int perm_ ;
      public boolean hasPerm() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getPerm() {
        return perm_;
      }
      public Builder setPerm(int value) {
        bitField0_ |= 0x00000001;
        perm_ = value;
        onChanged();
        return this;
      }
      public Builder clearPerm() {
        bitField0_ = (bitField0_ & ~0x00000001);
        perm_ = 0;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:FsPermissionProto)
    }
    
    static {
      defaultInstance = new FsPermissionProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:FsPermissionProto)
  }
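  // [Editorial note, not part of the generated source: FsPermissionProto carries a single
  //  required uint32. A typical caller encodes a POSIX-style mode; the literal below is
  //  illustrative.]
  //
  //   HdfsProtos.FsPermissionProto perm = HdfsProtos.FsPermissionProto.newBuilder()
  //       .setPerm(0644)   // required uint32 perm = 1 (octal literal for rw-r--r--)
  //       .build();
  //   int mode = perm.getPerm();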
  
  public interface LocatedBlockProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .ExtendedBlockProto b = 1;
    boolean hasB();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getB();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder getBOrBuilder();
    
    // required uint64 offset = 2;
    boolean hasOffset();
    long getOffset();
    
    // repeated .DatanodeInfoProto locs = 3;
    java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto>
        getLocsList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getLocs(int index);
    int getLocsCount();
    java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder>
        getLocsOrBuilderList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder getLocsOrBuilder(
        int index);
    
    // required bool corrupt = 4;
    boolean hasCorrupt();
    boolean getCorrupt();
    
    // required .BlockTokenIdentifierProto blockToken = 5;
    boolean hasBlockToken();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto getBlockToken();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder getBlockTokenOrBuilder();
  }
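  // [Editorial note, not part of the generated source: per the interface above, a
  //  LocatedBlockProto requires b, offset, corrupt and blockToken, with locs repeated.
  //  A construction sketch, assuming the ExtendedBlockProto, DatanodeInfoProto and
  //  BlockTokenIdentifierProto sub-messages (extendedBlock, datanodeInfo, blockToken)
  //  are fully initialized elsewhere:]
  //
  //   HdfsProtos.LocatedBlockProto located = HdfsProtos.LocatedBlockProto.newBuilder()
  //       .setB(extendedBlock)          // required .ExtendedBlockProto b = 1
  //       .setOffset(0L)                // required uint64 offset = 2
  //       .addLocs(datanodeInfo)        // repeated .DatanodeInfoProto locs = 3
  //       .setCorrupt(false)            // required bool corrupt = 4
  //       .setBlockToken(blockToken)    // required .BlockTokenIdentifierProto blockToken = 5
  //       .build();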
  public static final class LocatedBlockProto extends
      com.google.protobuf.GeneratedMessage
      implements LocatedBlockProtoOrBuilder {
    // Use LocatedBlockProto.newBuilder() to construct.
    private LocatedBlockProto(Builder builder) {
      super(builder);
    }
    private LocatedBlockProto(boolean noInit) {}
    
    private static final LocatedBlockProto defaultInstance;
    public static LocatedBlockProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public LocatedBlockProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlockProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlockProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .ExtendedBlockProto b = 1;
    public static final int B_FIELD_NUMBER = 1;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto b_;
    public boolean hasB() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getB() {
      return b_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder getBOrBuilder() {
      return b_;
    }
    
    // required uint64 offset = 2;
    public static final int OFFSET_FIELD_NUMBER = 2;
    private long offset_;
    public boolean hasOffset() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getOffset() {
      return offset_;
    }
    
    // repeated .DatanodeInfoProto locs = 3;
    public static final int LOCS_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> locs_;
    public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> getLocsList() {
      return locs_;
    }
    public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder>
        getLocsOrBuilderList() {
      return locs_;
    }
    public int getLocsCount() {
      return locs_.size();
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getLocs(int index) {
      return locs_.get(index);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder getLocsOrBuilder(
        int index) {
      return locs_.get(index);
    }
    
    // required bool corrupt = 4;
    public static final int CORRUPT_FIELD_NUMBER = 4;
    private boolean corrupt_;
    public boolean hasCorrupt() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public boolean getCorrupt() {
      return corrupt_;
    }
    
    // required .BlockTokenIdentifierProto blockToken = 5;
    public static final int BLOCKTOKEN_FIELD_NUMBER = 5;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto blockToken_;
    public boolean hasBlockToken() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto getBlockToken() {
      return blockToken_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder getBlockTokenOrBuilder() {
      return blockToken_;
    }
    
    private void initFields() {
      b_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance();
      offset_ = 0L;
      locs_ = java.util.Collections.emptyList();
      corrupt_ = false;
      blockToken_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasB()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasOffset()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasCorrupt()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBlockToken()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getB().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getLocsCount(); i++) {
        if (!getLocs(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (!getBlockToken().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, b_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, offset_);
      }
      for (int i = 0; i < locs_.size(); i++) {
        output.writeMessage(3, locs_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(4, corrupt_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(5, blockToken_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, b_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, offset_);
      }
      for (int i = 0; i < locs_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, locs_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, corrupt_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, blockToken_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto) obj;
      
      boolean result = true;
      result = result && (hasB() == other.hasB());
      if (hasB()) {
        result = result && getB()
            .equals(other.getB());
      }
      result = result && (hasOffset() == other.hasOffset());
      if (hasOffset()) {
        result = result && (getOffset()
            == other.getOffset());
      }
      result = result && getLocsList()
          .equals(other.getLocsList());
      result = result && (hasCorrupt() == other.hasCorrupt());
      if (hasCorrupt()) {
        result = result && (getCorrupt()
            == other.getCorrupt());
      }
      result = result && (hasBlockToken() == other.hasBlockToken());
      if (hasBlockToken()) {
        result = result && getBlockToken()
            .equals(other.getBlockToken());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasB()) {
        hash = (37 * hash) + B_FIELD_NUMBER;
        hash = (53 * hash) + getB().hashCode();
      }
      if (hasOffset()) {
        hash = (37 * hash) + OFFSET_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getOffset());
      }
      if (getLocsCount() > 0) {
        hash = (37 * hash) + LOCS_FIELD_NUMBER;
        hash = (53 * hash) + getLocsList().hashCode();
      }
      if (hasCorrupt()) {
        hash = (37 * hash) + CORRUPT_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getCorrupt());
      }
      if (hasBlockToken()) {
        hash = (37 * hash) + BLOCKTOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getBlockToken().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlockProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlockProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getBFieldBuilder();
          getLocsFieldBuilder();
          getBlockTokenFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (bBuilder_ == null) {
          b_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance();
        } else {
          bBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        offset_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (locsBuilder_ == null) {
          locs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          locsBuilder_.clear();
        }
        corrupt_ = false;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (blockTokenBuilder_ == null) {
          blockToken_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance();
        } else {
          blockTokenBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (bBuilder_ == null) {
          result.b_ = b_;
        } else {
          result.b_ = bBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.offset_ = offset_;
        if (locsBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            locs_ = java.util.Collections.unmodifiableList(locs_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.locs_ = locs_;
        } else {
          result.locs_ = locsBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000004;
        }
        result.corrupt_ = corrupt_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000008;
        }
        if (blockTokenBuilder_ == null) {
          result.blockToken_ = blockToken_;
        } else {
          result.blockToken_ = blockTokenBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance()) return this;
        if (other.hasB()) {
          mergeB(other.getB());
        }
        if (other.hasOffset()) {
          setOffset(other.getOffset());
        }
        if (locsBuilder_ == null) {
          if (!other.locs_.isEmpty()) {
            if (locs_.isEmpty()) {
              locs_ = other.locs_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureLocsIsMutable();
              locs_.addAll(other.locs_);
            }
            onChanged();
          }
        } else {
          if (!other.locs_.isEmpty()) {
            if (locsBuilder_.isEmpty()) {
              locsBuilder_.dispose();
              locsBuilder_ = null;
              locs_ = other.locs_;
              bitField0_ = (bitField0_ & ~0x00000004);
              locsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getLocsFieldBuilder() : null;
            } else {
              locsBuilder_.addAllMessages(other.locs_);
            }
          }
        }
        if (other.hasCorrupt()) {
          setCorrupt(other.getCorrupt());
        }
        if (other.hasBlockToken()) {
          mergeBlockToken(other.getBlockToken());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasB()) {
          
          return false;
        }
        if (!hasOffset()) {
          
          return false;
        }
        if (!hasCorrupt()) {
          
          return false;
        }
        if (!hasBlockToken()) {
          
          return false;
        }
        if (!getB().isInitialized()) {
          
          return false;
        }
        for (int i = 0; i < getLocsCount(); i++) {
          if (!getLocs(i).isInitialized()) {
            
            return false;
          }
        }
        if (!getBlockToken().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.newBuilder();
              if (hasB()) {
                subBuilder.mergeFrom(getB());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setB(subBuilder.buildPartial());
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              offset_ = input.readUInt64();
              break;
            }
            case 26: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addLocs(subBuilder.buildPartial());
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              corrupt_ = input.readBool();
              break;
            }
            case 42: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.newBuilder();
              if (hasBlockToken()) {
                subBuilder.mergeFrom(getBlockToken());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setBlockToken(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
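      // [Editorial note: tags 10, 16, 26, 32 and 42 above decode to fields 1-5 of
      //  LocatedBlockProto. For the singular message fields (b, blockToken) the parser first
      //  merges into any value already present via subBuilder.mergeFrom(), whereas each
      //  occurrence of the repeated locs field is parsed into a fresh builder and appended.]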
      
      private int bitField0_;
      
      // required .ExtendedBlockProto b = 1;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto b_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder> bBuilder_;
      public boolean hasB() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getB() {
        if (bBuilder_ == null) {
          return b_;
        } else {
          return bBuilder_.getMessage();
        }
      }
      public Builder setB(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto value) {
        if (bBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          b_ = value;
          onChanged();
        } else {
          bBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder setB(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder builderForValue) {
        if (bBuilder_ == null) {
          b_ = builderForValue.build();
          onChanged();
        } else {
          bBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder mergeB(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto value) {
        if (bBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              b_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance()) {
            b_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.newBuilder(b_).mergeFrom(value).buildPartial();
          } else {
            b_ = value;
          }
          onChanged();
        } else {
          bBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearB() {
        if (bBuilder_ == null) {
          b_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance();
          onChanged();
        } else {
          bBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder getBBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getBFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder getBOrBuilder() {
        if (bBuilder_ != null) {
          return bBuilder_.getMessageOrBuilder();
        } else {
          return b_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder> 
          getBFieldBuilder() {
        if (bBuilder_ == null) {
          bBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder>(
                  b_,
                  getParentForChildren(),
                  isClean());
          b_ = null;
        }
        return bBuilder_;
      }
      
      // required uint64 offset = 2;
      private long offset_ ;
      public boolean hasOffset() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getOffset() {
        return offset_;
      }
      public Builder setOffset(long value) {
        bitField0_ |= 0x00000002;
        offset_ = value;
        onChanged();
        return this;
      }
      public Builder clearOffset() {
        bitField0_ = (bitField0_ & ~0x00000002);
        offset_ = 0L;
        onChanged();
        return this;
      }
      
      // repeated .DatanodeInfoProto locs = 3;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> locs_ =
        java.util.Collections.emptyList();
      private void ensureLocsIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          locs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto>(locs_);
          bitField0_ |= 0x00000004;
        }
      }
      
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder> locsBuilder_;
      
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> getLocsList() {
        if (locsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(locs_);
        } else {
          return locsBuilder_.getMessageList();
        }
      }
      public int getLocsCount() {
        if (locsBuilder_ == null) {
          return locs_.size();
        } else {
          return locsBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getLocs(int index) {
        if (locsBuilder_ == null) {
          return locs_.get(index);
        } else {
          return locsBuilder_.getMessage(index);
        }
      }
      public Builder setLocs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto value) {
        if (locsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocsIsMutable();
          locs_.set(index, value);
          onChanged();
        } else {
          locsBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setLocs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder builderForValue) {
        if (locsBuilder_ == null) {
          ensureLocsIsMutable();
          locs_.set(index, builderForValue.build());
          onChanged();
        } else {
          locsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addLocs(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto value) {
        if (locsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocsIsMutable();
          locs_.add(value);
          onChanged();
        } else {
          locsBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addLocs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto value) {
        if (locsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocsIsMutable();
          locs_.add(index, value);
          onChanged();
        } else {
          locsBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addLocs(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder builderForValue) {
        if (locsBuilder_ == null) {
          ensureLocsIsMutable();
          locs_.add(builderForValue.build());
          onChanged();
        } else {
          locsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addLocs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder builderForValue) {
        if (locsBuilder_ == null) {
          ensureLocsIsMutable();
          locs_.add(index, builderForValue.build());
          onChanged();
        } else {
          locsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllLocs(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto> values) {
        if (locsBuilder_ == null) {
          ensureLocsIsMutable();
          super.addAll(values, locs_);
          onChanged();
        } else {
          locsBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearLocs() {
        if (locsBuilder_ == null) {
          locs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          locsBuilder_.clear();
        }
        return this;
      }
      public Builder removeLocs(int index) {
        if (locsBuilder_ == null) {
          ensureLocsIsMutable();
          locs_.remove(index);
          onChanged();
        } else {
          locsBuilder_.remove(index);
        }
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder getLocsBuilder(
          int index) {
        return getLocsFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder getLocsOrBuilder(
          int index) {
        if (locsBuilder_ == null) {
          return locs_.get(index);
        } else {
          return locsBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder>
           getLocsOrBuilderList() {
        if (locsBuilder_ != null) {
          return locsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(locs_);
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder addLocsBuilder() {
        return getLocsFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance());
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder addLocsBuilder(
          int index) {
        return getLocsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder>
           getLocsBuilderList() {
        return getLocsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder> 
          getLocsFieldBuilder() {
        if (locsBuilder_ == null) {
          locsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder>(
                  locs_,
                  ((bitField0_ & 0x00000004) == 0x00000004),
                  getParentForChildren(),
                  isClean());
          locs_ = null;
        }
        return locsBuilder_;
      }
      
      // required bool corrupt = 4;
      private boolean corrupt_ ;
      public boolean hasCorrupt() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public boolean getCorrupt() {
        return corrupt_;
      }
      public Builder setCorrupt(boolean value) {
        bitField0_ |= 0x00000008;
        corrupt_ = value;
        onChanged();
        return this;
      }
      public Builder clearCorrupt() {
        bitField0_ = (bitField0_ & ~0x00000008);
        corrupt_ = false;
        onChanged();
        return this;
      }
      
      // required .BlockTokenIdentifierProto blockToken = 5;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto blockToken_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder> blockTokenBuilder_;
      public boolean hasBlockToken() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto getBlockToken() {
        if (blockTokenBuilder_ == null) {
          return blockToken_;
        } else {
          return blockTokenBuilder_.getMessage();
        }
      }
      public Builder setBlockToken(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto value) {
        if (blockTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          blockToken_ = value;
          onChanged();
        } else {
          blockTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      public Builder setBlockToken(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder builderForValue) {
        if (blockTokenBuilder_ == null) {
          blockToken_ = builderForValue.build();
          onChanged();
        } else {
          blockTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      public Builder mergeBlockToken(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto value) {
        if (blockTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              blockToken_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance()) {
            blockToken_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.newBuilder(blockToken_).mergeFrom(value).buildPartial();
          } else {
            blockToken_ = value;
          }
          onChanged();
        } else {
          blockTokenBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      public Builder clearBlockToken() {
        if (blockTokenBuilder_ == null) {
          blockToken_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance();
          onChanged();
        } else {
          blockTokenBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder getBlockTokenBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getBlockTokenFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder getBlockTokenOrBuilder() {
        if (blockTokenBuilder_ != null) {
          return blockTokenBuilder_.getMessageOrBuilder();
        } else {
          return blockToken_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder> 
          getBlockTokenFieldBuilder() {
        if (blockTokenBuilder_ == null) {
          blockTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder>(
                  blockToken_,
                  getParentForChildren(),
                  isClean());
          blockToken_ = null;
        }
        return blockTokenBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:LocatedBlockProto)
    }
    
    static {
      defaultInstance = new LocatedBlockProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:LocatedBlockProto)
  }
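  // Illustrative usage sketch (not part of the generated code): constructing a
  // LocatedBlockProto with the builder API defined above. The variables "block",
  // "datanode", and "token" are assumed placeholders for previously built
  // ExtendedBlockProto, DatanodeInfoProto, and BlockTokenIdentifierProto messages.
  //
  //   LocatedBlockProto located = LocatedBlockProto.newBuilder()
  //       .setB(block)                 // required .ExtendedBlockProto b = 1
  //       .setOffset(0L)               // required uint64 offset = 2
  //       .addLocs(datanode)           // repeated .DatanodeInfoProto locs = 3
  //       .setCorrupt(false)           // required bool corrupt = 4
  //       .setBlockToken(token)        // required .BlockTokenIdentifierProto blockToken = 5
  //       .build();                    // build() throws if a required field is unset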
  
  public interface DataEncryptionKeyProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint32 keyId = 1;
    boolean hasKeyId();
    int getKeyId();
    
    // required string blockPoolId = 2;
    boolean hasBlockPoolId();
    String getBlockPoolId();
    
    // required bytes nonce = 3;
    boolean hasNonce();
    com.google.protobuf.ByteString getNonce();
    
    // required bytes encryptionKey = 4;
    boolean hasEncryptionKey();
    com.google.protobuf.ByteString getEncryptionKey();
    
    // required uint64 expiryDate = 5;
    boolean hasExpiryDate();
    long getExpiryDate();
    
    // optional string encryptionAlgorithm = 6;
    boolean hasEncryptionAlgorithm();
    String getEncryptionAlgorithm();
  }
  public static final class DataEncryptionKeyProto extends
      com.google.protobuf.GeneratedMessage
      implements DataEncryptionKeyProtoOrBuilder {
    // Use DataEncryptionKeyProto.newBuilder() to construct.
    private DataEncryptionKeyProto(Builder builder) {
      super(builder);
    }
    private DataEncryptionKeyProto(boolean noInit) {}
    
    private static final DataEncryptionKeyProto defaultInstance;
    public static DataEncryptionKeyProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public DataEncryptionKeyProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DataEncryptionKeyProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DataEncryptionKeyProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint32 keyId = 1;
    public static final int KEYID_FIELD_NUMBER = 1;
    private int keyId_;
    public boolean hasKeyId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getKeyId() {
      return keyId_;
    }
    
    // required string blockPoolId = 2;
    public static final int BLOCKPOOLID_FIELD_NUMBER = 2;
    private java.lang.Object blockPoolId_;
    public boolean hasBlockPoolId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public String getBlockPoolId() {
      java.lang.Object ref = blockPoolId_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          blockPoolId_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getBlockPoolIdBytes() {
      java.lang.Object ref = blockPoolId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        blockPoolId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required bytes nonce = 3;
    public static final int NONCE_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString nonce_;
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public com.google.protobuf.ByteString getNonce() {
      return nonce_;
    }
    
    // required bytes encryptionKey = 4;
    public static final int ENCRYPTIONKEY_FIELD_NUMBER = 4;
    private com.google.protobuf.ByteString encryptionKey_;
    public boolean hasEncryptionKey() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public com.google.protobuf.ByteString getEncryptionKey() {
      return encryptionKey_;
    }
    
    // required uint64 expiryDate = 5;
    public static final int EXPIRYDATE_FIELD_NUMBER = 5;
    private long expiryDate_;
    public boolean hasExpiryDate() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public long getExpiryDate() {
      return expiryDate_;
    }
    
    // optional string encryptionAlgorithm = 6;
    public static final int ENCRYPTIONALGORITHM_FIELD_NUMBER = 6;
    private java.lang.Object encryptionAlgorithm_;
    public boolean hasEncryptionAlgorithm() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    public String getEncryptionAlgorithm() {
      java.lang.Object ref = encryptionAlgorithm_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          encryptionAlgorithm_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getEncryptionAlgorithmBytes() {
      java.lang.Object ref = encryptionAlgorithm_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        encryptionAlgorithm_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    private void initFields() {
      keyId_ = 0;
      blockPoolId_ = "";
      nonce_ = com.google.protobuf.ByteString.EMPTY;
      encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
      expiryDate_ = 0L;
      encryptionAlgorithm_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasKeyId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBlockPoolId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasNonce()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEncryptionKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasExpiryDate()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, keyId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getBlockPoolIdBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, nonce_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, encryptionKey_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, expiryDate_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(6, getEncryptionAlgorithmBytes());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, keyId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getBlockPoolIdBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, nonce_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, encryptionKey_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, expiryDate_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, getEncryptionAlgorithmBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto) obj;
      
      boolean result = true;
      result = result && (hasKeyId() == other.hasKeyId());
      if (hasKeyId()) {
        result = result && (getKeyId()
            == other.getKeyId());
      }
      result = result && (hasBlockPoolId() == other.hasBlockPoolId());
      if (hasBlockPoolId()) {
        result = result && getBlockPoolId()
            .equals(other.getBlockPoolId());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && getNonce()
            .equals(other.getNonce());
      }
      result = result && (hasEncryptionKey() == other.hasEncryptionKey());
      if (hasEncryptionKey()) {
        result = result && getEncryptionKey()
            .equals(other.getEncryptionKey());
      }
      result = result && (hasExpiryDate() == other.hasExpiryDate());
      if (hasExpiryDate()) {
        result = result && (getExpiryDate()
            == other.getExpiryDate());
      }
      result = result && (hasEncryptionAlgorithm() == other.hasEncryptionAlgorithm());
      if (hasEncryptionAlgorithm()) {
        result = result && getEncryptionAlgorithm()
            .equals(other.getEncryptionAlgorithm());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasKeyId()) {
        hash = (37 * hash) + KEYID_FIELD_NUMBER;
        hash = (53 * hash) + getKeyId();
      }
      if (hasBlockPoolId()) {
        hash = (37 * hash) + BLOCKPOOLID_FIELD_NUMBER;
        hash = (53 * hash) + getBlockPoolId().hashCode();
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + getNonce().hashCode();
      }
      if (hasEncryptionKey()) {
        hash = (37 * hash) + ENCRYPTIONKEY_FIELD_NUMBER;
        hash = (53 * hash) + getEncryptionKey().hashCode();
      }
      if (hasExpiryDate()) {
        hash = (37 * hash) + EXPIRYDATE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getExpiryDate());
      }
      if (hasEncryptionAlgorithm()) {
        hash = (37 * hash) + ENCRYPTIONALGORITHM_FIELD_NUMBER;
        hash = (53 * hash) + getEncryptionAlgorithm().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DataEncryptionKeyProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DataEncryptionKeyProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        keyId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        blockPoolId_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        nonce_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        expiryDate_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        encryptionAlgorithm_ = "";
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.keyId_ = keyId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.blockPoolId_ = blockPoolId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonce_ = nonce_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.encryptionKey_ = encryptionKey_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.expiryDate_ = expiryDate_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.encryptionAlgorithm_ = encryptionAlgorithm_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto.getDefaultInstance()) return this;
        if (other.hasKeyId()) {
          setKeyId(other.getKeyId());
        }
        if (other.hasBlockPoolId()) {
          setBlockPoolId(other.getBlockPoolId());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        if (other.hasEncryptionKey()) {
          setEncryptionKey(other.getEncryptionKey());
        }
        if (other.hasExpiryDate()) {
          setExpiryDate(other.getExpiryDate());
        }
        if (other.hasEncryptionAlgorithm()) {
          setEncryptionAlgorithm(other.getEncryptionAlgorithm());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasKeyId()) {
          
          return false;
        }
        if (!hasBlockPoolId()) {
          
          return false;
        }
        if (!hasNonce()) {
          
          return false;
        }
        if (!hasEncryptionKey()) {
          
          return false;
        }
        if (!hasExpiryDate()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              keyId_ = input.readUInt32();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              blockPoolId_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              nonce_ = input.readBytes();
              break;
            }
            case 34: {
              bitField0_ |= 0x00000008;
              encryptionKey_ = input.readBytes();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              expiryDate_ = input.readUInt64();
              break;
            }
            case 50: {
              bitField0_ |= 0x00000020;
              encryptionAlgorithm_ = input.readBytes();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint32 keyId = 1;
      private int keyId_ ;
      public boolean hasKeyId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getKeyId() {
        return keyId_;
      }
      public Builder setKeyId(int value) {
        bitField0_ |= 0x00000001;
        keyId_ = value;
        onChanged();
        return this;
      }
      public Builder clearKeyId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        keyId_ = 0;
        onChanged();
        return this;
      }
      
      // required string blockPoolId = 2;
      private java.lang.Object blockPoolId_ = "";
      public boolean hasBlockPoolId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public String getBlockPoolId() {
        java.lang.Object ref = blockPoolId_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          blockPoolId_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setBlockPoolId(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        blockPoolId_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockPoolId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        blockPoolId_ = getDefaultInstance().getBlockPoolId();
        onChanged();
        return this;
      }
      void setBlockPoolId(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        blockPoolId_ = value;
        onChanged();
      }
      
      // required bytes nonce = 3;
      private com.google.protobuf.ByteString nonce_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public com.google.protobuf.ByteString getNonce() {
        return nonce_;
      }
      public Builder setNonce(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        nonce_ = value;
        onChanged();
        return this;
      }
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = getDefaultInstance().getNonce();
        onChanged();
        return this;
      }
      
      // required bytes encryptionKey = 4;
      private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasEncryptionKey() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public com.google.protobuf.ByteString getEncryptionKey() {
        return encryptionKey_;
      }
      public Builder setEncryptionKey(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        encryptionKey_ = value;
        onChanged();
        return this;
      }
      public Builder clearEncryptionKey() {
        bitField0_ = (bitField0_ & ~0x00000008);
        encryptionKey_ = getDefaultInstance().getEncryptionKey();
        onChanged();
        return this;
      }
      
      // required uint64 expiryDate = 5;
      private long expiryDate_ ;
      public boolean hasExpiryDate() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public long getExpiryDate() {
        return expiryDate_;
      }
      public Builder setExpiryDate(long value) {
        bitField0_ |= 0x00000010;
        expiryDate_ = value;
        onChanged();
        return this;
      }
      public Builder clearExpiryDate() {
        bitField0_ = (bitField0_ & ~0x00000010);
        expiryDate_ = 0L;
        onChanged();
        return this;
      }
      
      // optional string encryptionAlgorithm = 6;
      private java.lang.Object encryptionAlgorithm_ = "";
      public boolean hasEncryptionAlgorithm() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      public String getEncryptionAlgorithm() {
        java.lang.Object ref = encryptionAlgorithm_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          encryptionAlgorithm_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setEncryptionAlgorithm(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        encryptionAlgorithm_ = value;
        onChanged();
        return this;
      }
      public Builder clearEncryptionAlgorithm() {
        bitField0_ = (bitField0_ & ~0x00000020);
        encryptionAlgorithm_ = getDefaultInstance().getEncryptionAlgorithm();
        onChanged();
        return this;
      }
      void setEncryptionAlgorithm(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000020;
        encryptionAlgorithm_ = value;
        onChanged();
      }
      
      // @@protoc_insertion_point(builder_scope:DataEncryptionKeyProto)
    }
    
    static {
      defaultInstance = new DataEncryptionKeyProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:DataEncryptionKeyProto)
  }
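  // Illustrative usage sketch (not part of the generated code): building a
  // DataEncryptionKeyProto and round-tripping it through its serialized form.
  // The literal field values below are assumed placeholders.
  //
  //   DataEncryptionKeyProto key = DataEncryptionKeyProto.newBuilder()
  //       .setKeyId(1)
  //       .setBlockPoolId("BP-example")
  //       .setNonce(com.google.protobuf.ByteString.copyFrom(new byte[] {1, 2, 3}))
  //       .setEncryptionKey(com.google.protobuf.ByteString.copyFrom(new byte[] {4, 5, 6}))
  //       .setExpiryDate(System.currentTimeMillis())
  //       .build();
  //   byte[] wire = key.toByteArray();
  //   DataEncryptionKeyProto parsed = DataEncryptionKeyProto.parseFrom(wire);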
  
  public interface LocatedBlocksProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint64 fileLength = 1;
    boolean hasFileLength();
    long getFileLength();
    
    // repeated .LocatedBlockProto blocks = 2;
    java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto>
        getBlocksList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getBlocks(int index);
    int getBlocksCount();
    java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder>
        getBlocksOrBuilderList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getBlocksOrBuilder(
        int index);
    
    // required bool underConstruction = 3;
    boolean hasUnderConstruction();
    boolean getUnderConstruction();
    
    // optional .LocatedBlockProto lastBlock = 4;
    boolean hasLastBlock();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getLastBlock();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getLastBlockOrBuilder();
    
    // required bool isLastBlockComplete = 5;
    boolean hasIsLastBlockComplete();
    boolean getIsLastBlockComplete();
  }
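  // Illustrative usage sketch (not part of the generated code): reading the fields
  // declared by LocatedBlocksProtoOrBuilder from a received message. "blocks" is an
  // assumed placeholder for a LocatedBlocksProto (or its Builder) instance.
  //
  //   long length = blocks.getFileLength();
  //   for (int i = 0; i < blocks.getBlocksCount(); i++) {
  //     LocatedBlockProto blk = blocks.getBlocks(i);
  //     // inspect blk.getOffset(), blk.getCorrupt(), blk.getLocsList(), ...
  //   }
  //   if (blocks.hasLastBlock() && !blocks.getIsLastBlockComplete()) {
  //     LocatedBlockProto last = blocks.getLastBlock();
  //   }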
  public static final class LocatedBlocksProto extends
      com.google.protobuf.GeneratedMessage
      implements LocatedBlocksProtoOrBuilder {
    // Use LocatedBlocksProto.newBuilder() to construct.
    private LocatedBlocksProto(Builder builder) {
      super(builder);
    }
    private LocatedBlocksProto(boolean noInit) {}
    
    private static final LocatedBlocksProto defaultInstance;
    public static LocatedBlocksProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public LocatedBlocksProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlocksProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlocksProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint64 fileLength = 1;
    public static final int FILELENGTH_FIELD_NUMBER = 1;
    private long fileLength_;
    public boolean hasFileLength() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getFileLength() {
      return fileLength_;
    }
    
    // repeated .LocatedBlockProto blocks = 2;
    public static final int BLOCKS_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto> blocks_;
    public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto> getBlocksList() {
      return blocks_;
    }
    public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder>
        getBlocksOrBuilderList() {
      return blocks_;
    }
    public int getBlocksCount() {
      return blocks_.size();
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getBlocks(int index) {
      return blocks_.get(index);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getBlocksOrBuilder(
        int index) {
      return blocks_.get(index);
    }
    
    // required bool underConstruction = 3;
    public static final int UNDERCONSTRUCTION_FIELD_NUMBER = 3;
    private boolean underConstruction_;
    public boolean hasUnderConstruction() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public boolean getUnderConstruction() {
      return underConstruction_;
    }
    
    // optional .LocatedBlockProto lastBlock = 4;
    public static final int LASTBLOCK_FIELD_NUMBER = 4;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto lastBlock_;
    public boolean hasLastBlock() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getLastBlock() {
      return lastBlock_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getLastBlockOrBuilder() {
      return lastBlock_;
    }
    
    // required bool isLastBlockComplete = 5;
    public static final int ISLASTBLOCKCOMPLETE_FIELD_NUMBER = 5;
    private boolean isLastBlockComplete_;
    public boolean hasIsLastBlockComplete() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public boolean getIsLastBlockComplete() {
      return isLastBlockComplete_;
    }
    
    private void initFields() {
      fileLength_ = 0L;
      blocks_ = java.util.Collections.emptyList();
      underConstruction_ = false;
      lastBlock_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
      isLastBlockComplete_ = false;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasFileLength()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasUnderConstruction()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasIsLastBlockComplete()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getBlocksCount(); i++) {
        if (!getBlocks(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasLastBlock()) {
        if (!getLastBlock().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, fileLength_);
      }
      for (int i = 0; i < blocks_.size(); i++) {
        output.writeMessage(2, blocks_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(3, underConstruction_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(4, lastBlock_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(5, isLastBlockComplete_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, fileLength_);
      }
      for (int i = 0; i < blocks_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, blocks_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, underConstruction_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, lastBlock_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(5, isLastBlockComplete_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto) obj;
      
      boolean result = true;
      result = result && (hasFileLength() == other.hasFileLength());
      if (hasFileLength()) {
        result = result && (getFileLength()
            == other.getFileLength());
      }
      result = result && getBlocksList()
          .equals(other.getBlocksList());
      result = result && (hasUnderConstruction() == other.hasUnderConstruction());
      if (hasUnderConstruction()) {
        result = result && (getUnderConstruction()
            == other.getUnderConstruction());
      }
      result = result && (hasLastBlock() == other.hasLastBlock());
      if (hasLastBlock()) {
        result = result && getLastBlock()
            .equals(other.getLastBlock());
      }
      result = result && (hasIsLastBlockComplete() == other.hasIsLastBlockComplete());
      if (hasIsLastBlockComplete()) {
        result = result && (getIsLastBlockComplete()
            == other.getIsLastBlockComplete());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFileLength()) {
        hash = (37 * hash) + FILELENGTH_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFileLength());
      }
      if (getBlocksCount() > 0) {
        hash = (37 * hash) + BLOCKS_FIELD_NUMBER;
        hash = (53 * hash) + getBlocksList().hashCode();
      }
      if (hasUnderConstruction()) {
        hash = (37 * hash) + UNDERCONSTRUCTION_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getUnderConstruction());
      }
      if (hasLastBlock()) {
        hash = (37 * hash) + LASTBLOCK_FIELD_NUMBER;
        hash = (53 * hash) + getLastBlock().hashCode();
      }
      if (hasIsLastBlockComplete()) {
        hash = (37 * hash) + ISLASTBLOCKCOMPLETE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIsLastBlockComplete());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlocksProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_LocatedBlocksProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getBlocksFieldBuilder();
          getLastBlockFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        fileLength_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (blocksBuilder_ == null) {
          blocks_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          blocksBuilder_.clear();
        }
        underConstruction_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        if (lastBlockBuilder_ == null) {
          lastBlock_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
        } else {
          lastBlockBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        isLastBlockComplete_ = false;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.fileLength_ = fileLength_;
        if (blocksBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            blocks_ = java.util.Collections.unmodifiableList(blocks_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.blocks_ = blocks_;
        } else {
          result.blocks_ = blocksBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000002;
        }
        result.underConstruction_ = underConstruction_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000004;
        }
        if (lastBlockBuilder_ == null) {
          result.lastBlock_ = lastBlock_;
        } else {
          result.lastBlock_ = lastBlockBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000008;
        }
        result.isLastBlockComplete_ = isLastBlockComplete_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDefaultInstance()) return this;
        if (other.hasFileLength()) {
          setFileLength(other.getFileLength());
        }
        if (blocksBuilder_ == null) {
          if (!other.blocks_.isEmpty()) {
            if (blocks_.isEmpty()) {
              blocks_ = other.blocks_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureBlocksIsMutable();
              blocks_.addAll(other.blocks_);
            }
            onChanged();
          }
        } else {
          if (!other.blocks_.isEmpty()) {
            if (blocksBuilder_.isEmpty()) {
              blocksBuilder_.dispose();
              blocksBuilder_ = null;
              blocks_ = other.blocks_;
              bitField0_ = (bitField0_ & ~0x00000002);
              blocksBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getBlocksFieldBuilder() : null;
            } else {
              blocksBuilder_.addAllMessages(other.blocks_);
            }
          }
        }
        if (other.hasUnderConstruction()) {
          setUnderConstruction(other.getUnderConstruction());
        }
        if (other.hasLastBlock()) {
          mergeLastBlock(other.getLastBlock());
        }
        if (other.hasIsLastBlockComplete()) {
          setIsLastBlockComplete(other.getIsLastBlockComplete());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasFileLength()) {
          
          return false;
        }
        if (!hasUnderConstruction()) {
          
          return false;
        }
        if (!hasIsLastBlockComplete()) {
          
          return false;
        }
        for (int i = 0; i < getBlocksCount(); i++) {
          if (!getBlocks(i).isInitialized()) {
            
            return false;
          }
        }
        if (hasLastBlock()) {
          if (!getLastBlock().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              fileLength_ = input.readUInt64();
              break;
            }
            case 18: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addBlocks(subBuilder.buildPartial());
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              underConstruction_ = input.readBool();
              break;
            }
            case 34: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder();
              if (hasLastBlock()) {
                subBuilder.mergeFrom(getLastBlock());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setLastBlock(subBuilder.buildPartial());
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              isLastBlockComplete_ = input.readBool();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint64 fileLength = 1;
      private long fileLength_ ;
      public boolean hasFileLength() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getFileLength() {
        return fileLength_;
      }
      public Builder setFileLength(long value) {
        bitField0_ |= 0x00000001;
        fileLength_ = value;
        onChanged();
        return this;
      }
      public Builder clearFileLength() {
        bitField0_ = (bitField0_ & ~0x00000001);
        fileLength_ = 0L;
        onChanged();
        return this;
      }
      
      // repeated .LocatedBlockProto blocks = 2;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto> blocks_ =
        java.util.Collections.emptyList();
      private void ensureBlocksIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto>(blocks_);
          bitField0_ |= 0x00000002;
         }
      }
      
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder> blocksBuilder_;
      
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto> getBlocksList() {
        if (blocksBuilder_ == null) {
          return java.util.Collections.unmodifiableList(blocks_);
        } else {
          return blocksBuilder_.getMessageList();
        }
      }
      public int getBlocksCount() {
        if (blocksBuilder_ == null) {
          return blocks_.size();
        } else {
          return blocksBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getBlocks(int index) {
        if (blocksBuilder_ == null) {
          return blocks_.get(index);
        } else {
          return blocksBuilder_.getMessage(index);
        }
      }
      public Builder setBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
        if (blocksBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBlocksIsMutable();
          blocks_.set(index, value);
          onChanged();
        } else {
          blocksBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder builderForValue) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.set(index, builderForValue.build());
          onChanged();
        } else {
          blocksBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addBlocks(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
        if (blocksBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBlocksIsMutable();
          blocks_.add(value);
          onChanged();
        } else {
          blocksBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
        if (blocksBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBlocksIsMutable();
          blocks_.add(index, value);
          onChanged();
        } else {
          blocksBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addBlocks(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder builderForValue) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.add(builderForValue.build());
          onChanged();
        } else {
          blocksBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder builderForValue) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.add(index, builderForValue.build());
          onChanged();
        } else {
          blocksBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllBlocks(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto> values) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          super.addAll(values, blocks_);
          onChanged();
        } else {
          blocksBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearBlocks() {
        if (blocksBuilder_ == null) {
          blocks_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          blocksBuilder_.clear();
        }
        return this;
      }
      public Builder removeBlocks(int index) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.remove(index);
          onChanged();
        } else {
          blocksBuilder_.remove(index);
        }
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder getBlocksBuilder(
          int index) {
        return getBlocksFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getBlocksOrBuilder(
          int index) {
        if (blocksBuilder_ == null) {
          return blocks_.get(index);  } else {
          return blocksBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder> 
           getBlocksOrBuilderList() {
        if (blocksBuilder_ != null) {
          return blocksBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(blocks_);
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder addBlocksBuilder() {
        return getBlocksFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance());
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder addBlocksBuilder(
          int index) {
        return getBlocksFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder> 
           getBlocksBuilderList() {
        return getBlocksFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder> 
          getBlocksFieldBuilder() {
        if (blocksBuilder_ == null) {
          blocksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder>(
                  blocks_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          blocks_ = null;
        }
        return blocksBuilder_;
      }
      
      // required bool underConstruction = 3;
      private boolean underConstruction_ ;
      public boolean hasUnderConstruction() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public boolean getUnderConstruction() {
        return underConstruction_;
      }
      public Builder setUnderConstruction(boolean value) {
        bitField0_ |= 0x00000004;
        underConstruction_ = value;
        onChanged();
        return this;
      }
      public Builder clearUnderConstruction() {
        bitField0_ = (bitField0_ & ~0x00000004);
        underConstruction_ = false;
        onChanged();
        return this;
      }
      
      // optional .LocatedBlockProto lastBlock = 4;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto lastBlock_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder> lastBlockBuilder_;
      public boolean hasLastBlock() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getLastBlock() {
        if (lastBlockBuilder_ == null) {
          return lastBlock_;
        } else {
          return lastBlockBuilder_.getMessage();
        }
      }
      public Builder setLastBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
        if (lastBlockBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          lastBlock_ = value;
          onChanged();
        } else {
          lastBlockBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder setLastBlock(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder builderForValue) {
        if (lastBlockBuilder_ == null) {
          lastBlock_ = builderForValue.build();
          onChanged();
        } else {
          lastBlockBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder mergeLastBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
        if (lastBlockBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              lastBlock_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance()) {
            lastBlock_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder(lastBlock_).mergeFrom(value).buildPartial();
          } else {
            lastBlock_ = value;
          }
          onChanged();
        } else {
          lastBlockBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder clearLastBlock() {
        if (lastBlockBuilder_ == null) {
          lastBlock_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
          onChanged();
        } else {
          lastBlockBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder getLastBlockBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getLastBlockFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getLastBlockOrBuilder() {
        if (lastBlockBuilder_ != null) {
          return lastBlockBuilder_.getMessageOrBuilder();
        } else {
          return lastBlock_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder> 
          getLastBlockFieldBuilder() {
        if (lastBlockBuilder_ == null) {
          lastBlockBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder>(
                  lastBlock_,
                  getParentForChildren(),
                  isClean());
          lastBlock_ = null;
        }
        return lastBlockBuilder_;
      }
      
      // required bool isLastBlockComplete = 5;
      private boolean isLastBlockComplete_ ;
      public boolean hasIsLastBlockComplete() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public boolean getIsLastBlockComplete() {
        return isLastBlockComplete_;
      }
      public Builder setIsLastBlockComplete(boolean value) {
        bitField0_ |= 0x00000010;
        isLastBlockComplete_ = value;
        onChanged();
        return this;
      }
      public Builder clearIsLastBlockComplete() {
        bitField0_ = (bitField0_ & ~0x00000010);
        isLastBlockComplete_ = false;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:LocatedBlocksProto)
    }
    
    static {
      defaultInstance = new LocatedBlocksProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:LocatedBlocksProto)
  }
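  // Illustrative usage sketch, not part of the generated output. A minimal
  // round trip through the LocatedBlocksProto API above: populate the three
  // required fields, serialize to a ByteString, and parse it back with the
  // static parseFrom overload defined in this class. The literal values are
  // placeholders.
  //
  //   HdfsProtos.LocatedBlocksProto blocks = HdfsProtos.LocatedBlocksProto.newBuilder()
  //       .setFileLength(134217728L)       // required uint64 fileLength = 1
  //       .setUnderConstruction(false)     // required bool underConstruction = 3
  //       .setIsLastBlockComplete(true)    // required bool isLastBlockComplete = 5
  //       .build();                        // build() throws if a required field is unset
  //   com.google.protobuf.ByteString wire = blocks.toByteString();
  //   HdfsProtos.LocatedBlocksProto parsed = HdfsProtos.LocatedBlocksProto.parseFrom(wire);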
  
  public interface HdfsFileStatusProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .HdfsFileStatusProto.FileType fileType = 1;
    boolean hasFileType();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType getFileType();
    
    // required bytes path = 2;
    boolean hasPath();
    com.google.protobuf.ByteString getPath();
    
    // required uint64 length = 3;
    boolean hasLength();
    long getLength();
    
    // required .FsPermissionProto permission = 4;
    boolean hasPermission();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto getPermission();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProtoOrBuilder getPermissionOrBuilder();
    
    // required string owner = 5;
    boolean hasOwner();
    String getOwner();
    
    // required string group = 6;
    boolean hasGroup();
    String getGroup();
    
    // required uint64 modification_time = 7;
    boolean hasModificationTime();
    long getModificationTime();
    
    // required uint64 access_time = 8;
    boolean hasAccessTime();
    long getAccessTime();
    
    // optional bytes symlink = 9;
    boolean hasSymlink();
    com.google.protobuf.ByteString getSymlink();
    
    // optional uint32 block_replication = 10 [default = 0];
    boolean hasBlockReplication();
    int getBlockReplication();
    
    // optional uint64 blocksize = 11 [default = 0];
    boolean hasBlocksize();
    long getBlocksize();
    
    // optional .LocatedBlocksProto locations = 12;
    boolean hasLocations();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto getLocations();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProtoOrBuilder getLocationsOrBuilder();
  }
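  // Illustrative sketch, not part of the generated output. The *OrBuilder
  // interface above lets a helper read fields from either a built
  // HdfsFileStatusProto or its Builder without forcing a build(); the method
  // name below is hypothetical.
  //
  //   static long lengthOrZero(HdfsProtos.HdfsFileStatusProtoOrBuilder status) {
  //     return status.hasLength() ? status.getLength() : 0L;  // required uint64 length = 3
  //   }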
  public static final class HdfsFileStatusProto extends
      com.google.protobuf.GeneratedMessage
      implements HdfsFileStatusProtoOrBuilder {
    // Use HdfsFileStatusProto.newBuilder() to construct.
    private HdfsFileStatusProto(Builder builder) {
      super(builder);
    }
    private HdfsFileStatusProto(boolean noInit) {}
    
    private static final HdfsFileStatusProto defaultInstance;
    public static HdfsFileStatusProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public HdfsFileStatusProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_HdfsFileStatusProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_HdfsFileStatusProto_fieldAccessorTable;
    }
    
    public enum FileType
        implements com.google.protobuf.ProtocolMessageEnum {
      IS_DIR(0, 1),
      IS_FILE(1, 2),
      IS_SYMLINK(2, 3),
      ;
      
      public static final int IS_DIR_VALUE = 1;
      public static final int IS_FILE_VALUE = 2;
      public static final int IS_SYMLINK_VALUE = 3;
      
      
      public final int getNumber() { return value; }
      
      public static FileType valueOf(int value) {
        switch (value) {
          case 1: return IS_DIR;
          case 2: return IS_FILE;
          case 3: return IS_SYMLINK;
          default: return null;
        }
      }
      
      public static com.google.protobuf.Internal.EnumLiteMap<FileType>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<FileType>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<FileType>() {
              public FileType findValueByNumber(int number) {
                return FileType.valueOf(number);
              }
            };
      
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.getDescriptor().getEnumTypes().get(0);
      }
      
      private static final FileType[] VALUES = {
        IS_DIR, IS_FILE, IS_SYMLINK, 
      };
      
      public static FileType valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }
      
      private final int index;
      private final int value;
      
      private FileType(int index, int value) {
        this.index = index;
        this.value = value;
      }
      
      // @@protoc_insertion_point(enum_scope:HdfsFileStatusProto.FileType)
    }
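    // Illustrative sketch, not part of the generated output. FileType maps the
    // wire numbers 1..3 to its constants; valueOf(int) returns null for any
    // other number, which is how an unrecognized enum value ends up in the
    // unknown-field set during parsing (see the case 8 branch of
    // Builder.mergeFrom below).
    //
    //   HdfsProtos.HdfsFileStatusProto.FileType t =
    //       HdfsProtos.HdfsFileStatusProto.FileType.valueOf(2);                   // IS_FILE
    //   int wire = HdfsProtos.HdfsFileStatusProto.FileType.IS_SYMLINK.getNumber(); // 3
    //   // FileType.valueOf(7) == null: kept as an unknown varint field instead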
    
    private int bitField0_;
    // required .HdfsFileStatusProto.FileType fileType = 1;
    public static final int FILETYPE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType fileType_;
    public boolean hasFileType() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType getFileType() {
      return fileType_;
    }
    
    // required bytes path = 2;
    public static final int PATH_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString path_;
    public boolean hasPath() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public com.google.protobuf.ByteString getPath() {
      return path_;
    }
    
    // required uint64 length = 3;
    public static final int LENGTH_FIELD_NUMBER = 3;
    private long length_;
    public boolean hasLength() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getLength() {
      return length_;
    }
    
    // required .FsPermissionProto permission = 4;
    public static final int PERMISSION_FIELD_NUMBER = 4;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto permission_;
    public boolean hasPermission() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto getPermission() {
      return permission_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProtoOrBuilder getPermissionOrBuilder() {
      return permission_;
    }
    
    // required string owner = 5;
    public static final int OWNER_FIELD_NUMBER = 5;
    private java.lang.Object owner_;
    public boolean hasOwner() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public String getOwner() {
      java.lang.Object ref = owner_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          owner_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getOwnerBytes() {
      java.lang.Object ref = owner_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        owner_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required string group = 6;
    public static final int GROUP_FIELD_NUMBER = 6;
    private java.lang.Object group_;
    public boolean hasGroup() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    public String getGroup() {
      java.lang.Object ref = group_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          group_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getGroupBytes() {
      java.lang.Object ref = group_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        group_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required uint64 modification_time = 7;
    public static final int MODIFICATION_TIME_FIELD_NUMBER = 7;
    private long modificationTime_;
    public boolean hasModificationTime() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    public long getModificationTime() {
      return modificationTime_;
    }
    
    // required uint64 access_time = 8;
    public static final int ACCESS_TIME_FIELD_NUMBER = 8;
    private long accessTime_;
    public boolean hasAccessTime() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    public long getAccessTime() {
      return accessTime_;
    }
    
    // optional bytes symlink = 9;
    public static final int SYMLINK_FIELD_NUMBER = 9;
    private com.google.protobuf.ByteString symlink_;
    public boolean hasSymlink() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    public com.google.protobuf.ByteString getSymlink() {
      return symlink_;
    }
    
    // optional uint32 block_replication = 10 [default = 0];
    public static final int BLOCK_REPLICATION_FIELD_NUMBER = 10;
    private int blockReplication_;
    public boolean hasBlockReplication() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    public int getBlockReplication() {
      return blockReplication_;
    }
    
    // optional uint64 blocksize = 11 [default = 0];
    public static final int BLOCKSIZE_FIELD_NUMBER = 11;
    private long blocksize_;
    public boolean hasBlocksize() {
      return ((bitField0_ & 0x00000400) == 0x00000400);
    }
    public long getBlocksize() {
      return blocksize_;
    }
    
    // optional .LocatedBlocksProto locations = 12;
    public static final int LOCATIONS_FIELD_NUMBER = 12;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto locations_;
    public boolean hasLocations() {
      return ((bitField0_ & 0x00000800) == 0x00000800);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto getLocations() {
      return locations_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProtoOrBuilder getLocationsOrBuilder() {
      return locations_;
    }
    
    private void initFields() {
      fileType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType.IS_DIR;
      path_ = com.google.protobuf.ByteString.EMPTY;
      length_ = 0L;
      permission_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDefaultInstance();
      owner_ = "";
      group_ = "";
      modificationTime_ = 0L;
      accessTime_ = 0L;
      symlink_ = com.google.protobuf.ByteString.EMPTY;
      blockReplication_ = 0;
      blocksize_ = 0L;
      locations_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasFileType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasPath()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasLength()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasPermission()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasOwner()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasGroup()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasModificationTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasAccessTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getPermission().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasLocations()) {
        if (!getLocations().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, fileType_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, path_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, length_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, permission_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getOwnerBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(6, getGroupBytes());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(7, modificationTime_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeUInt64(8, accessTime_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeBytes(9, symlink_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeUInt32(10, blockReplication_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        output.writeUInt64(11, blocksize_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        output.writeMessage(12, locations_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, fileType_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, path_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, length_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, permission_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getOwnerBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, getGroupBytes());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(7, modificationTime_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(8, accessTime_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(9, symlink_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(10, blockReplication_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(11, blocksize_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(12, locations_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto) obj;
      
      boolean result = true;
      result = result && (hasFileType() == other.hasFileType());
      if (hasFileType()) {
        result = result &&
            (getFileType() == other.getFileType());
      }
      result = result && (hasPath() == other.hasPath());
      if (hasPath()) {
        result = result && getPath()
            .equals(other.getPath());
      }
      result = result && (hasLength() == other.hasLength());
      if (hasLength()) {
        result = result && (getLength()
            == other.getLength());
      }
      result = result && (hasPermission() == other.hasPermission());
      if (hasPermission()) {
        result = result && getPermission()
            .equals(other.getPermission());
      }
      result = result && (hasOwner() == other.hasOwner());
      if (hasOwner()) {
        result = result && getOwner()
            .equals(other.getOwner());
      }
      result = result && (hasGroup() == other.hasGroup());
      if (hasGroup()) {
        result = result && getGroup()
            .equals(other.getGroup());
      }
      result = result && (hasModificationTime() == other.hasModificationTime());
      if (hasModificationTime()) {
        result = result && (getModificationTime()
            == other.getModificationTime());
      }
      result = result && (hasAccessTime() == other.hasAccessTime());
      if (hasAccessTime()) {
        result = result && (getAccessTime()
            == other.getAccessTime());
      }
      result = result && (hasSymlink() == other.hasSymlink());
      if (hasSymlink()) {
        result = result && getSymlink()
            .equals(other.getSymlink());
      }
      result = result && (hasBlockReplication() == other.hasBlockReplication());
      if (hasBlockReplication()) {
        result = result && (getBlockReplication()
            == other.getBlockReplication());
      }
      result = result && (hasBlocksize() == other.hasBlocksize());
      if (hasBlocksize()) {
        result = result && (getBlocksize()
            == other.getBlocksize());
      }
      result = result && (hasLocations() == other.hasLocations());
      if (hasLocations()) {
        result = result && getLocations()
            .equals(other.getLocations());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFileType()) {
        hash = (37 * hash) + FILETYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getFileType());
      }
      if (hasPath()) {
        hash = (37 * hash) + PATH_FIELD_NUMBER;
        hash = (53 * hash) + getPath().hashCode();
      }
      if (hasLength()) {
        hash = (37 * hash) + LENGTH_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLength());
      }
      if (hasPermission()) {
        hash = (37 * hash) + PERMISSION_FIELD_NUMBER;
        hash = (53 * hash) + getPermission().hashCode();
      }
      if (hasOwner()) {
        hash = (37 * hash) + OWNER_FIELD_NUMBER;
        hash = (53 * hash) + getOwner().hashCode();
      }
      if (hasGroup()) {
        hash = (37 * hash) + GROUP_FIELD_NUMBER;
        hash = (53 * hash) + getGroup().hashCode();
      }
      if (hasModificationTime()) {
        hash = (37 * hash) + MODIFICATION_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getModificationTime());
      }
      if (hasAccessTime()) {
        hash = (37 * hash) + ACCESS_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getAccessTime());
      }
      if (hasSymlink()) {
        hash = (37 * hash) + SYMLINK_FIELD_NUMBER;
        hash = (53 * hash) + getSymlink().hashCode();
      }
      if (hasBlockReplication()) {
        hash = (37 * hash) + BLOCK_REPLICATION_FIELD_NUMBER;
        hash = (53 * hash) + getBlockReplication();
      }
      if (hasBlocksize()) {
        hash = (37 * hash) + BLOCKSIZE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBlocksize());
      }
      if (hasLocations()) {
        hash = (37 * hash) + LOCATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getLocations().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_HdfsFileStatusProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_HdfsFileStatusProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getPermissionFieldBuilder();
          getLocationsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        fileType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType.IS_DIR;
        bitField0_ = (bitField0_ & ~0x00000001);
        path_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        length_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        if (permissionBuilder_ == null) {
          permission_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDefaultInstance();
        } else {
          permissionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        owner_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        group_ = "";
        bitField0_ = (bitField0_ & ~0x00000020);
        modificationTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000040);
        accessTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000080);
        symlink_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000100);
        blockReplication_ = 0;
        bitField0_ = (bitField0_ & ~0x00000200);
        blocksize_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000400);
        if (locationsBuilder_ == null) {
          locations_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDefaultInstance();
        } else {
          locationsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000800);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.fileType_ = fileType_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.path_ = path_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.length_ = length_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (permissionBuilder_ == null) {
          result.permission_ = permission_;
        } else {
          result.permission_ = permissionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.owner_ = owner_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.group_ = group_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.modificationTime_ = modificationTime_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.accessTime_ = accessTime_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        result.symlink_ = symlink_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000200;
        }
        result.blockReplication_ = blockReplication_;
        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
          to_bitField0_ |= 0x00000400;
        }
        result.blocksize_ = blocksize_;
        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
          to_bitField0_ |= 0x00000800;
        }
        if (locationsBuilder_ == null) {
          result.locations_ = locations_;
        } else {
          result.locations_ = locationsBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.getDefaultInstance()) return this;
        if (other.hasFileType()) {
          setFileType(other.getFileType());
        }
        if (other.hasPath()) {
          setPath(other.getPath());
        }
        if (other.hasLength()) {
          setLength(other.getLength());
        }
        if (other.hasPermission()) {
          mergePermission(other.getPermission());
        }
        if (other.hasOwner()) {
          setOwner(other.getOwner());
        }
        if (other.hasGroup()) {
          setGroup(other.getGroup());
        }
        if (other.hasModificationTime()) {
          setModificationTime(other.getModificationTime());
        }
        if (other.hasAccessTime()) {
          setAccessTime(other.getAccessTime());
        }
        if (other.hasSymlink()) {
          setSymlink(other.getSymlink());
        }
        if (other.hasBlockReplication()) {
          setBlockReplication(other.getBlockReplication());
        }
        if (other.hasBlocksize()) {
          setBlocksize(other.getBlocksize());
        }
        if (other.hasLocations()) {
          mergeLocations(other.getLocations());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasFileType()) {
          
          return false;
        }
        if (!hasPath()) {
          
          return false;
        }
        if (!hasLength()) {
          
          return false;
        }
        if (!hasPermission()) {
          
          return false;
        }
        if (!hasOwner()) {
          
          return false;
        }
        if (!hasGroup()) {
          
          return false;
        }
        if (!hasModificationTime()) {
          
          return false;
        }
        if (!hasAccessTime()) {
          
          return false;
        }
        if (!getPermission().isInitialized()) {
          
          return false;
        }
        if (hasLocations()) {
          if (!getLocations().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
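            // The tag values in the cases below follow the protobuf wire format: tag = (field_number << 3) | wire_type.
            // For example, 8 = field 1 as a varint (fileType enum), 18 = field 2 as length-delimited bytes (path),
            // 34 = field 4 as an embedded message (permission), and 98 = field 12 as an embedded message (locations).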
            case 8: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType value = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                fileType_ = value;
              }
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              path_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              length_ = input.readUInt64();
              break;
            }
            case 34: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.newBuilder();
              if (hasPermission()) {
                subBuilder.mergeFrom(getPermission());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setPermission(subBuilder.buildPartial());
              break;
            }
            case 42: {
              bitField0_ |= 0x00000010;
              owner_ = input.readBytes();
              break;
            }
            case 50: {
              bitField0_ |= 0x00000020;
              group_ = input.readBytes();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              modificationTime_ = input.readUInt64();
              break;
            }
            case 64: {
              bitField0_ |= 0x00000080;
              accessTime_ = input.readUInt64();
              break;
            }
            case 74: {
              bitField0_ |= 0x00000100;
              symlink_ = input.readBytes();
              break;
            }
            case 80: {
              bitField0_ |= 0x00000200;
              blockReplication_ = input.readUInt32();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000400;
              blocksize_ = input.readUInt64();
              break;
            }
            case 98: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.newBuilder();
              if (hasLocations()) {
                subBuilder.mergeFrom(getLocations());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setLocations(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
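      // Each bit in the bitField0_ above records whether the corresponding field has been explicitly set on
      // this builder: the has*() accessors test a mask, the set*() methods OR it in, and the clear*() methods
      // clear it before restoring the field's default value.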
      
      // required .HdfsFileStatusProto.FileType fileType = 1;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType fileType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType.IS_DIR;
      public boolean hasFileType() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType getFileType() {
        return fileType_;
      }
      public Builder setFileType(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        fileType_ = value;
        onChanged();
        return this;
      }
      public Builder clearFileType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        fileType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType.IS_DIR;
        onChanged();
        return this;
      }
      
      // required bytes path = 2;
      private com.google.protobuf.ByteString path_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasPath() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public com.google.protobuf.ByteString getPath() {
        return path_;
      }
      public Builder setPath(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        path_ = value;
        onChanged();
        return this;
      }
      public Builder clearPath() {
        bitField0_ = (bitField0_ & ~0x00000002);
        path_ = getDefaultInstance().getPath();
        onChanged();
        return this;
      }
      
      // required uint64 length = 3;
      private long length_ ;
      public boolean hasLength() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getLength() {
        return length_;
      }
      public Builder setLength(long value) {
        bitField0_ |= 0x00000004;
        length_ = value;
        onChanged();
        return this;
      }
      public Builder clearLength() {
        bitField0_ = (bitField0_ & ~0x00000004);
        length_ = 0L;
        onChanged();
        return this;
      }
      
      // required .FsPermissionProto permission = 4;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto permission_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProtoOrBuilder> permissionBuilder_;
      public boolean hasPermission() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto getPermission() {
        if (permissionBuilder_ == null) {
          return permission_;
        } else {
          return permissionBuilder_.getMessage();
        }
      }
      public Builder setPermission(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto value) {
        if (permissionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          permission_ = value;
          onChanged();
        } else {
          permissionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder setPermission(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.Builder builderForValue) {
        if (permissionBuilder_ == null) {
          permission_ = builderForValue.build();
          onChanged();
        } else {
          permissionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder mergePermission(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto value) {
        if (permissionBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              permission_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDefaultInstance()) {
            permission_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.newBuilder(permission_).mergeFrom(value).buildPartial();
          } else {
            permission_ = value;
          }
          onChanged();
        } else {
          permissionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder clearPermission() {
        if (permissionBuilder_ == null) {
          permission_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.getDefaultInstance();
          onChanged();
        } else {
          permissionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.Builder getPermissionBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getPermissionFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProtoOrBuilder getPermissionOrBuilder() {
        if (permissionBuilder_ != null) {
          return permissionBuilder_.getMessageOrBuilder();
        } else {
          return permission_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProtoOrBuilder> 
          getPermissionFieldBuilder() {
        if (permissionBuilder_ == null) {
          permissionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProtoOrBuilder>(
                  permission_,
                  getParentForChildren(),
                  isClean());
          permission_ = null;
        }
        return permissionBuilder_;
      }
      
      // required string owner = 5;
      private java.lang.Object owner_ = "";
      public boolean hasOwner() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public String getOwner() {
        java.lang.Object ref = owner_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          owner_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setOwner(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        owner_ = value;
        onChanged();
        return this;
      }
      public Builder clearOwner() {
        bitField0_ = (bitField0_ & ~0x00000010);
        owner_ = getDefaultInstance().getOwner();
        onChanged();
        return this;
      }
      void setOwner(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000010;
        owner_ = value;
        onChanged();
      }
      
      // required string group = 6;
      private java.lang.Object group_ = "";
      public boolean hasGroup() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      public String getGroup() {
        java.lang.Object ref = group_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          group_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setGroup(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        group_ = value;
        onChanged();
        return this;
      }
      public Builder clearGroup() {
        bitField0_ = (bitField0_ & ~0x00000020);
        group_ = getDefaultInstance().getGroup();
        onChanged();
        return this;
      }
      void setGroup(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000020;
        group_ = value;
        onChanged();
      }
      
      // required uint64 modification_time = 7;
      private long modificationTime_ ;
      public boolean hasModificationTime() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      public long getModificationTime() {
        return modificationTime_;
      }
      public Builder setModificationTime(long value) {
        bitField0_ |= 0x00000040;
        modificationTime_ = value;
        onChanged();
        return this;
      }
      public Builder clearModificationTime() {
        bitField0_ = (bitField0_ & ~0x00000040);
        modificationTime_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 access_time = 8;
      private long accessTime_ ;
      public boolean hasAccessTime() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      public long getAccessTime() {
        return accessTime_;
      }
      public Builder setAccessTime(long value) {
        bitField0_ |= 0x00000080;
        accessTime_ = value;
        onChanged();
        return this;
      }
      public Builder clearAccessTime() {
        bitField0_ = (bitField0_ & ~0x00000080);
        accessTime_ = 0L;
        onChanged();
        return this;
      }
      
      // optional bytes symlink = 9;
      private com.google.protobuf.ByteString symlink_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasSymlink() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      public com.google.protobuf.ByteString getSymlink() {
        return symlink_;
      }
      public Builder setSymlink(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000100;
        symlink_ = value;
        onChanged();
        return this;
      }
      public Builder clearSymlink() {
        bitField0_ = (bitField0_ & ~0x00000100);
        symlink_ = getDefaultInstance().getSymlink();
        onChanged();
        return this;
      }
      
      // optional uint32 block_replication = 10 [default = 0];
      private int blockReplication_ ;
      public boolean hasBlockReplication() {
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      public int getBlockReplication() {
        return blockReplication_;
      }
      public Builder setBlockReplication(int value) {
        bitField0_ |= 0x00000200;
        blockReplication_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockReplication() {
        bitField0_ = (bitField0_ & ~0x00000200);
        blockReplication_ = 0;
        onChanged();
        return this;
      }
      
      // optional uint64 blocksize = 11 [default = 0];
      private long blocksize_ ;
      public boolean hasBlocksize() {
        return ((bitField0_ & 0x00000400) == 0x00000400);
      }
      public long getBlocksize() {
        return blocksize_;
      }
      public Builder setBlocksize(long value) {
        bitField0_ |= 0x00000400;
        blocksize_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlocksize() {
        bitField0_ = (bitField0_ & ~0x00000400);
        blocksize_ = 0L;
        onChanged();
        return this;
      }
      
      // optional .LocatedBlocksProto locations = 12;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto locations_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProtoOrBuilder> locationsBuilder_;
      public boolean hasLocations() {
        return ((bitField0_ & 0x00000800) == 0x00000800);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto getLocations() {
        if (locationsBuilder_ == null) {
          return locations_;
        } else {
          return locationsBuilder_.getMessage();
        }
      }
      public Builder setLocations(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto value) {
        if (locationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          locations_ = value;
          onChanged();
        } else {
          locationsBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000800;
        return this;
      }
      public Builder setLocations(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.Builder builderForValue) {
        if (locationsBuilder_ == null) {
          locations_ = builderForValue.build();
          onChanged();
        } else {
          locationsBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000800;
        return this;
      }
      public Builder mergeLocations(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto value) {
        if (locationsBuilder_ == null) {
          if (((bitField0_ & 0x00000800) == 0x00000800) &&
              locations_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDefaultInstance()) {
            locations_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.newBuilder(locations_).mergeFrom(value).buildPartial();
          } else {
            locations_ = value;
          }
          onChanged();
        } else {
          locationsBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000800;
        return this;
      }
      public Builder clearLocations() {
        if (locationsBuilder_ == null) {
          locations_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.getDefaultInstance();
          onChanged();
        } else {
          locationsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000800);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.Builder getLocationsBuilder() {
        bitField0_ |= 0x00000800;
        onChanged();
        return getLocationsFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProtoOrBuilder getLocationsOrBuilder() {
        if (locationsBuilder_ != null) {
          return locationsBuilder_.getMessageOrBuilder();
        } else {
          return locations_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProtoOrBuilder> 
          getLocationsFieldBuilder() {
        if (locationsBuilder_ == null) {
          locationsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProtoOrBuilder>(
                  locations_,
                  getParentForChildren(),
                  isClean());
          locations_ = null;
        }
        return locationsBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:HdfsFileStatusProto)
    }
    
    static {
      defaultInstance = new HdfsFileStatusProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:HdfsFileStatusProto)
  }
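  /*
   * Illustrative sketch (not part of the generated API surface): one plausible way to construct an
   * HdfsFileStatusProto with the builder methods defined above and round-trip it through its wire form.
   * The setPerm(int) call on FsPermissionProto.Builder is an assumption based on hdfs.proto declaring a
   * required uint32 perm field; everything else uses accessors shown in this file.
   *
   *   HdfsFileStatusProto status = HdfsFileStatusProto.newBuilder()
   *       .setFileType(HdfsFileStatusProto.FileType.IS_FILE)
   *       .setPath(com.google.protobuf.ByteString.copyFromUtf8("/user/example/data.txt"))
   *       .setLength(1024L)
   *       .setPermission(FsPermissionProto.newBuilder().setPerm(0644).build())  // setPerm is assumed
   *       .setOwner("hdfs")
   *       .setGroup("supergroup")
   *       .setModificationTime(System.currentTimeMillis())
   *       .setAccessTime(System.currentTimeMillis())
   *       .build();  // build() enforces the required fields checked in isInitialized()
   *   HdfsFileStatusProto parsed = HdfsFileStatusProto.parseFrom(status.toByteArray());
   */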
  
  public interface FsServerDefaultsProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint64 blockSize = 1;
    boolean hasBlockSize();
    long getBlockSize();
    
    // required uint32 bytesPerChecksum = 2;
    boolean hasBytesPerChecksum();
    int getBytesPerChecksum();
    
    // required uint32 writePacketSize = 3;
    boolean hasWritePacketSize();
    int getWritePacketSize();
    
    // required uint32 replication = 4;
    boolean hasReplication();
    int getReplication();
    
    // required uint32 fileBufferSize = 5;
    boolean hasFileBufferSize();
    int getFileBufferSize();
    
    // optional bool encryptDataTransfer = 6 [default = false];
    boolean hasEncryptDataTransfer();
    boolean getEncryptDataTransfer();
    
    // optional uint64 trashInterval = 7 [default = 0];
    boolean hasTrashInterval();
    long getTrashInterval();
    
    // optional .ChecksumTypeProto checksumType = 8 [default = CHECKSUM_CRC32];
    boolean hasChecksumType();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto getChecksumType();
  }
  public static final class FsServerDefaultsProto extends
      com.google.protobuf.GeneratedMessage
      implements FsServerDefaultsProtoOrBuilder {
    // Use FsServerDefaultsProto.newBuilder() to construct.
    private FsServerDefaultsProto(Builder builder) {
      super(builder);
    }
    private FsServerDefaultsProto(boolean noInit) {}
    
    private static final FsServerDefaultsProto defaultInstance;
    public static FsServerDefaultsProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public FsServerDefaultsProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsServerDefaultsProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsServerDefaultsProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint64 blockSize = 1;
    public static final int BLOCKSIZE_FIELD_NUMBER = 1;
    private long blockSize_;
    public boolean hasBlockSize() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getBlockSize() {
      return blockSize_;
    }
    
    // required uint32 bytesPerChecksum = 2;
    public static final int BYTESPERCHECKSUM_FIELD_NUMBER = 2;
    private int bytesPerChecksum_;
    public boolean hasBytesPerChecksum() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public int getBytesPerChecksum() {
      return bytesPerChecksum_;
    }
    
    // required uint32 writePacketSize = 3;
    public static final int WRITEPACKETSIZE_FIELD_NUMBER = 3;
    private int writePacketSize_;
    public boolean hasWritePacketSize() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public int getWritePacketSize() {
      return writePacketSize_;
    }
    
    // required uint32 replication = 4;
    public static final int REPLICATION_FIELD_NUMBER = 4;
    private int replication_;
    public boolean hasReplication() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public int getReplication() {
      return replication_;
    }
    
    // required uint32 fileBufferSize = 5;
    public static final int FILEBUFFERSIZE_FIELD_NUMBER = 5;
    private int fileBufferSize_;
    public boolean hasFileBufferSize() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public int getFileBufferSize() {
      return fileBufferSize_;
    }
    
    // optional bool encryptDataTransfer = 6 [default = false];
    public static final int ENCRYPTDATATRANSFER_FIELD_NUMBER = 6;
    private boolean encryptDataTransfer_;
    public boolean hasEncryptDataTransfer() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    public boolean getEncryptDataTransfer() {
      return encryptDataTransfer_;
    }
    
    // optional uint64 trashInterval = 7 [default = 0];
    public static final int TRASHINTERVAL_FIELD_NUMBER = 7;
    private long trashInterval_;
    public boolean hasTrashInterval() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    public long getTrashInterval() {
      return trashInterval_;
    }
    
    // optional .ChecksumTypeProto checksumType = 8 [default = CHECKSUM_CRC32];
    public static final int CHECKSUMTYPE_FIELD_NUMBER = 8;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto checksumType_;
    public boolean hasChecksumType() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto getChecksumType() {
      return checksumType_;
    }
    
    private void initFields() {
      blockSize_ = 0L;
      bytesPerChecksum_ = 0;
      writePacketSize_ = 0;
      replication_ = 0;
      fileBufferSize_ = 0;
      encryptDataTransfer_ = false;
      trashInterval_ = 0L;
      checksumType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasBlockSize()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBytesPerChecksum()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasWritePacketSize()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasReplication()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFileBufferSize()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, blockSize_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, bytesPerChecksum_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, writePacketSize_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt32(4, replication_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt32(5, fileBufferSize_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBool(6, encryptDataTransfer_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(7, trashInterval_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeEnum(8, checksumType_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, blockSize_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, bytesPerChecksum_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, writePacketSize_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(4, replication_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(5, fileBufferSize_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(6, encryptDataTransfer_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(7, trashInterval_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(8, checksumType_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto) obj;
      
      boolean result = true;
      result = result && (hasBlockSize() == other.hasBlockSize());
      if (hasBlockSize()) {
        result = result && (getBlockSize()
            == other.getBlockSize());
      }
      result = result && (hasBytesPerChecksum() == other.hasBytesPerChecksum());
      if (hasBytesPerChecksum()) {
        result = result && (getBytesPerChecksum()
            == other.getBytesPerChecksum());
      }
      result = result && (hasWritePacketSize() == other.hasWritePacketSize());
      if (hasWritePacketSize()) {
        result = result && (getWritePacketSize()
            == other.getWritePacketSize());
      }
      result = result && (hasReplication() == other.hasReplication());
      if (hasReplication()) {
        result = result && (getReplication()
            == other.getReplication());
      }
      result = result && (hasFileBufferSize() == other.hasFileBufferSize());
      if (hasFileBufferSize()) {
        result = result && (getFileBufferSize()
            == other.getFileBufferSize());
      }
      result = result && (hasEncryptDataTransfer() == other.hasEncryptDataTransfer());
      if (hasEncryptDataTransfer()) {
        result = result && (getEncryptDataTransfer()
            == other.getEncryptDataTransfer());
      }
      result = result && (hasTrashInterval() == other.hasTrashInterval());
      if (hasTrashInterval()) {
        result = result && (getTrashInterval()
            == other.getTrashInterval());
      }
      result = result && (hasChecksumType() == other.hasChecksumType());
      if (hasChecksumType()) {
        result = result &&
            (getChecksumType() == other.getChecksumType());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasBlockSize()) {
        hash = (37 * hash) + BLOCKSIZE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBlockSize());
      }
      if (hasBytesPerChecksum()) {
        hash = (37 * hash) + BYTESPERCHECKSUM_FIELD_NUMBER;
        hash = (53 * hash) + getBytesPerChecksum();
      }
      if (hasWritePacketSize()) {
        hash = (37 * hash) + WRITEPACKETSIZE_FIELD_NUMBER;
        hash = (53 * hash) + getWritePacketSize();
      }
      if (hasReplication()) {
        hash = (37 * hash) + REPLICATION_FIELD_NUMBER;
        hash = (53 * hash) + getReplication();
      }
      if (hasFileBufferSize()) {
        hash = (37 * hash) + FILEBUFFERSIZE_FIELD_NUMBER;
        hash = (53 * hash) + getFileBufferSize();
      }
      if (hasEncryptDataTransfer()) {
        hash = (37 * hash) + ENCRYPTDATATRANSFER_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getEncryptDataTransfer());
      }
      if (hasTrashInterval()) {
        hash = (37 * hash) + TRASHINTERVAL_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getTrashInterval());
      }
      if (hasChecksumType()) {
        hash = (37 * hash) + CHECKSUMTYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getChecksumType());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsServerDefaultsProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_FsServerDefaultsProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        blockSize_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        bytesPerChecksum_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        writePacketSize_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        replication_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        fileBufferSize_ = 0;
        bitField0_ = (bitField0_ & ~0x00000010);
        encryptDataTransfer_ = false;
        bitField0_ = (bitField0_ & ~0x00000020);
        trashInterval_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000040);
        checksumType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32;
        bitField0_ = (bitField0_ & ~0x00000080);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.blockSize_ = blockSize_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.bytesPerChecksum_ = bytesPerChecksum_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.writePacketSize_ = writePacketSize_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.replication_ = replication_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.fileBufferSize_ = fileBufferSize_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.encryptDataTransfer_ = encryptDataTransfer_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.trashInterval_ = trashInterval_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.checksumType_ = checksumType_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto.getDefaultInstance()) return this;
        if (other.hasBlockSize()) {
          setBlockSize(other.getBlockSize());
        }
        if (other.hasBytesPerChecksum()) {
          setBytesPerChecksum(other.getBytesPerChecksum());
        }
        if (other.hasWritePacketSize()) {
          setWritePacketSize(other.getWritePacketSize());
        }
        if (other.hasReplication()) {
          setReplication(other.getReplication());
        }
        if (other.hasFileBufferSize()) {
          setFileBufferSize(other.getFileBufferSize());
        }
        if (other.hasEncryptDataTransfer()) {
          setEncryptDataTransfer(other.getEncryptDataTransfer());
        }
        if (other.hasTrashInterval()) {
          setTrashInterval(other.getTrashInterval());
        }
        if (other.hasChecksumType()) {
          setChecksumType(other.getChecksumType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasBlockSize()) {
          
          return false;
        }
        if (!hasBytesPerChecksum()) {
          
          return false;
        }
        if (!hasWritePacketSize()) {
          
          return false;
        }
        if (!hasReplication()) {
          
          return false;
        }
        if (!hasFileBufferSize()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              blockSize_ = input.readUInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              bytesPerChecksum_ = input.readUInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              writePacketSize_ = input.readUInt32();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              replication_ = input.readUInt32();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              fileBufferSize_ = input.readUInt32();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              encryptDataTransfer_ = input.readBool();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              trashInterval_ = input.readUInt64();
              break;
            }
            case 64: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto value = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(8, rawValue);
              } else {
                bitField0_ |= 0x00000080;
                checksumType_ = value;
              }
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint64 blockSize = 1;
      private long blockSize_ ;
      public boolean hasBlockSize() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getBlockSize() {
        return blockSize_;
      }
      public Builder setBlockSize(long value) {
        bitField0_ |= 0x00000001;
        blockSize_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockSize() {
        bitField0_ = (bitField0_ & ~0x00000001);
        blockSize_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint32 bytesPerChecksum = 2;
      private int bytesPerChecksum_ ;
      public boolean hasBytesPerChecksum() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public int getBytesPerChecksum() {
        return bytesPerChecksum_;
      }
      public Builder setBytesPerChecksum(int value) {
        bitField0_ |= 0x00000002;
        bytesPerChecksum_ = value;
        onChanged();
        return this;
      }
      public Builder clearBytesPerChecksum() {
        bitField0_ = (bitField0_ & ~0x00000002);
        bytesPerChecksum_ = 0;
        onChanged();
        return this;
      }
      
      // required uint32 writePacketSize = 3;
      private int writePacketSize_ ;
      public boolean hasWritePacketSize() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public int getWritePacketSize() {
        return writePacketSize_;
      }
      public Builder setWritePacketSize(int value) {
        bitField0_ |= 0x00000004;
        writePacketSize_ = value;
        onChanged();
        return this;
      }
      public Builder clearWritePacketSize() {
        bitField0_ = (bitField0_ & ~0x00000004);
        writePacketSize_ = 0;
        onChanged();
        return this;
      }
      
      // required uint32 replication = 4;
      private int replication_ ;
      public boolean hasReplication() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public int getReplication() {
        return replication_;
      }
      public Builder setReplication(int value) {
        bitField0_ |= 0x00000008;
        replication_ = value;
        onChanged();
        return this;
      }
      public Builder clearReplication() {
        bitField0_ = (bitField0_ & ~0x00000008);
        replication_ = 0;
        onChanged();
        return this;
      }
      
      // required uint32 fileBufferSize = 5;
      private int fileBufferSize_ ;
      public boolean hasFileBufferSize() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public int getFileBufferSize() {
        return fileBufferSize_;
      }
      public Builder setFileBufferSize(int value) {
        bitField0_ |= 0x00000010;
        fileBufferSize_ = value;
        onChanged();
        return this;
      }
      public Builder clearFileBufferSize() {
        bitField0_ = (bitField0_ & ~0x00000010);
        fileBufferSize_ = 0;
        onChanged();
        return this;
      }
      
      // optional bool encryptDataTransfer = 6 [default = false];
      private boolean encryptDataTransfer_ ;
      public boolean hasEncryptDataTransfer() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      public boolean getEncryptDataTransfer() {
        return encryptDataTransfer_;
      }
      public Builder setEncryptDataTransfer(boolean value) {
        bitField0_ |= 0x00000020;
        encryptDataTransfer_ = value;
        onChanged();
        return this;
      }
      public Builder clearEncryptDataTransfer() {
        bitField0_ = (bitField0_ & ~0x00000020);
        encryptDataTransfer_ = false;
        onChanged();
        return this;
      }
      
      // optional uint64 trashInterval = 7 [default = 0];
      private long trashInterval_ ;
      public boolean hasTrashInterval() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      public long getTrashInterval() {
        return trashInterval_;
      }
      public Builder setTrashInterval(long value) {
        bitField0_ |= 0x00000040;
        trashInterval_ = value;
        onChanged();
        return this;
      }
      public Builder clearTrashInterval() {
        bitField0_ = (bitField0_ & ~0x00000040);
        trashInterval_ = 0L;
        onChanged();
        return this;
      }
      
      // optional .ChecksumTypeProto checksumType = 8 [default = CHECKSUM_CRC32];
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto checksumType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32;
      public boolean hasChecksumType() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto getChecksumType() {
        return checksumType_;
      }
      public Builder setChecksumType(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000080;
        checksumType_ = value;
        onChanged();
        return this;
      }
      public Builder clearChecksumType() {
        bitField0_ = (bitField0_ & ~0x00000080);
        checksumType_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:FsServerDefaultsProto)
    }
    
    static {
      defaultInstance = new FsServerDefaultsProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:FsServerDefaultsProto)
  }
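  /*
   * Illustrative sketch (an assumed usage pattern, not generated code): reading server defaults after
   * parsing them from a byte array, using only accessors declared on FsServerDefaultsProto above. Note
   * that checksumType falls back to its declared default, CHECKSUM_CRC32, when the field is unset.
   *
   *   FsServerDefaultsProto defaults = FsServerDefaultsProto.parseFrom(rawBytes);  // rawBytes: a serialized message (hypothetical variable)
   *   long blockSize = defaults.getBlockSize();
   *   int bytesPerChecksum = defaults.getBytesPerChecksum();
   *   boolean encrypt = defaults.hasEncryptDataTransfer() && defaults.getEncryptDataTransfer();
   *   ChecksumTypeProto checksum = defaults.getChecksumType();
   */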
  
  public interface DirectoryListingProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated .HdfsFileStatusProto partialListing = 1;
    java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto> 
        getPartialListingList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto getPartialListing(int index);
    int getPartialListingCount();
    java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder> 
        getPartialListingOrBuilderList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder getPartialListingOrBuilder(
        int index);
    
    // required uint32 remainingEntries = 2;
    boolean hasRemainingEntries();
    int getRemainingEntries();
  }
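  /*
   * Illustrative sketch (assumed usage pattern): a DirectoryListingProto carries one page of directory
   * entries in partialListing plus a remainingEntries count, so a caller can page through large
   * directories by checking getRemainingEntries() after consuming each batch.
   *
   *   for (int i = 0; i < listing.getPartialListingCount(); i++) {   // listing: a parsed DirectoryListingProto (hypothetical variable)
   *     HdfsFileStatusProto entry = listing.getPartialListing(i);
   *     // process entry ...
   *   }
   *   boolean moreToFetch = listing.getRemainingEntries() > 0;
   */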
  public static final class DirectoryListingProto extends
      com.google.protobuf.GeneratedMessage
      implements DirectoryListingProtoOrBuilder {
    // Use DirectoryListingProto.newBuilder() to construct.
    private DirectoryListingProto(Builder builder) {
      super(builder);
    }
    private DirectoryListingProto(boolean noInit) {}
    
    private static final DirectoryListingProto defaultInstance;
    public static DirectoryListingProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public DirectoryListingProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DirectoryListingProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DirectoryListingProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // repeated .HdfsFileStatusProto partialListing = 1;
    public static final int PARTIALLISTING_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto> partialListing_;
    public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto> getPartialListingList() {
      return partialListing_;
    }
    public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder> 
        getPartialListingOrBuilderList() {
      return partialListing_;
    }
    public int getPartialListingCount() {
      return partialListing_.size();
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto getPartialListing(int index) {
      return partialListing_.get(index);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder getPartialListingOrBuilder(
        int index) {
      return partialListing_.get(index);
    }
    
    // required uint32 remainingEntries = 2;
    public static final int REMAININGENTRIES_FIELD_NUMBER = 2;
    private int remainingEntries_;
    public boolean hasRemainingEntries() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getRemainingEntries() {
      return remainingEntries_;
    }
    
    private void initFields() {
      partialListing_ = java.util.Collections.emptyList();
      remainingEntries_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasRemainingEntries()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getPartialListingCount(); i++) {
        if (!getPartialListing(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < partialListing_.size(); i++) {
        output.writeMessage(1, partialListing_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(2, remainingEntries_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      for (int i = 0; i < partialListing_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, partialListing_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, remainingEntries_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto) obj;
      
      boolean result = true;
      result = result && getPartialListingList()
          .equals(other.getPartialListingList());
      result = result && (hasRemainingEntries() == other.hasRemainingEntries());
      if (hasRemainingEntries()) {
        result = result && (getRemainingEntries()
            == other.getRemainingEntries());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getPartialListingCount() > 0) {
        hash = (37 * hash) + PARTIALLISTING_FIELD_NUMBER;
        hash = (53 * hash) + getPartialListingList().hashCode();
      }
      if (hasRemainingEntries()) {
        hash = (37 * hash) + REMAININGENTRIES_FIELD_NUMBER;
        hash = (53 * hash) + getRemainingEntries();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DirectoryListingProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DirectoryListingProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getPartialListingFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (partialListingBuilder_ == null) {
          partialListing_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          partialListingBuilder_.clear();
        }
        remainingEntries_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (partialListingBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            partialListing_ = java.util.Collections.unmodifiableList(partialListing_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.partialListing_ = partialListing_;
        } else {
          result.partialListing_ = partialListingBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.remainingEntries_ = remainingEntries_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto.getDefaultInstance()) return this;
        if (partialListingBuilder_ == null) {
          if (!other.partialListing_.isEmpty()) {
            if (partialListing_.isEmpty()) {
              partialListing_ = other.partialListing_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensurePartialListingIsMutable();
              partialListing_.addAll(other.partialListing_);
            }
            onChanged();
          }
        } else {
          if (!other.partialListing_.isEmpty()) {
            if (partialListingBuilder_.isEmpty()) {
              partialListingBuilder_.dispose();
              partialListingBuilder_ = null;
              partialListing_ = other.partialListing_;
              bitField0_ = (bitField0_ & ~0x00000001);
              partialListingBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getPartialListingFieldBuilder() : null;
            } else {
              partialListingBuilder_.addAllMessages(other.partialListing_);
            }
          }
        }
        if (other.hasRemainingEntries()) {
          setRemainingEntries(other.getRemainingEntries());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasRemainingEntries()) {
          
          return false;
        }
        for (int i = 0; i < getPartialListingCount(); i++) {
          if (!getPartialListing(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addPartialListing(subBuilder.buildPartial());
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              remainingEntries_ = input.readUInt32();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // repeated .HdfsFileStatusProto partialListing = 1;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto> partialListing_ =
        java.util.Collections.emptyList();
      private void ensurePartialListingIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          partialListing_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto>(partialListing_);
          bitField0_ |= 0x00000001;
         }
      }
      
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder> partialListingBuilder_;
      
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto> getPartialListingList() {
        if (partialListingBuilder_ == null) {
          return java.util.Collections.unmodifiableList(partialListing_);
        } else {
          return partialListingBuilder_.getMessageList();
        }
      }
      public int getPartialListingCount() {
        if (partialListingBuilder_ == null) {
          return partialListing_.size();
        } else {
          return partialListingBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto getPartialListing(int index) {
        if (partialListingBuilder_ == null) {
          return partialListing_.get(index);
        } else {
          return partialListingBuilder_.getMessage(index);
        }
      }
      public Builder setPartialListing(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto value) {
        if (partialListingBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePartialListingIsMutable();
          partialListing_.set(index, value);
          onChanged();
        } else {
          partialListingBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setPartialListing(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder builderForValue) {
        if (partialListingBuilder_ == null) {
          ensurePartialListingIsMutable();
          partialListing_.set(index, builderForValue.build());
          onChanged();
        } else {
          partialListingBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addPartialListing(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto value) {
        if (partialListingBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePartialListingIsMutable();
          partialListing_.add(value);
          onChanged();
        } else {
          partialListingBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addPartialListing(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto value) {
        if (partialListingBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePartialListingIsMutable();
          partialListing_.add(index, value);
          onChanged();
        } else {
          partialListingBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addPartialListing(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder builderForValue) {
        if (partialListingBuilder_ == null) {
          ensurePartialListingIsMutable();
          partialListing_.add(builderForValue.build());
          onChanged();
        } else {
          partialListingBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addPartialListing(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder builderForValue) {
        if (partialListingBuilder_ == null) {
          ensurePartialListingIsMutable();
          partialListing_.add(index, builderForValue.build());
          onChanged();
        } else {
          partialListingBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllPartialListing(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto> values) {
        if (partialListingBuilder_ == null) {
          ensurePartialListingIsMutable();
          super.addAll(values, partialListing_);
          onChanged();
        } else {
          partialListingBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearPartialListing() {
        if (partialListingBuilder_ == null) {
          partialListing_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          partialListingBuilder_.clear();
        }
        return this;
      }
      public Builder removePartialListing(int index) {
        if (partialListingBuilder_ == null) {
          ensurePartialListingIsMutable();
          partialListing_.remove(index);
          onChanged();
        } else {
          partialListingBuilder_.remove(index);
        }
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder getPartialListingBuilder(
          int index) {
        return getPartialListingFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder getPartialListingOrBuilder(
          int index) {
        if (partialListingBuilder_ == null) {
          return partialListing_.get(index);  } else {
          return partialListingBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder> 
           getPartialListingOrBuilderList() {
        if (partialListingBuilder_ != null) {
          return partialListingBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(partialListing_);
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder addPartialListingBuilder() {
        return getPartialListingFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.getDefaultInstance());
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder addPartialListingBuilder(
          int index) {
        return getPartialListingFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder> 
           getPartialListingBuilderList() {
        return getPartialListingFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder> 
          getPartialListingFieldBuilder() {
        if (partialListingBuilder_ == null) {
          partialListingBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProtoOrBuilder>(
                  partialListing_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          partialListing_ = null;
        }
        return partialListingBuilder_;
      }
      
      // required uint32 remainingEntries = 2;
      private int remainingEntries_ ;
      public boolean hasRemainingEntries() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public int getRemainingEntries() {
        return remainingEntries_;
      }
      public Builder setRemainingEntries(int value) {
        bitField0_ |= 0x00000002;
        remainingEntries_ = value;
        onChanged();
        return this;
      }
      public Builder clearRemainingEntries() {
        bitField0_ = (bitField0_ & ~0x00000002);
        remainingEntries_ = 0;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:DirectoryListingProto)
    }
    
    static {
      defaultInstance = new DirectoryListingProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:DirectoryListingProto)
  }
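
  // Editor's note: illustrative usage sketch only, not emitted by protoc. It assumes
  // the caller supplies a fully built HdfsFileStatusProto ("status") and shows the
  // builder, required-field, and wire round-trip contracts of DirectoryListingProto.
  private static DirectoryListingProto exampleDirectoryListingRoundTrip(
      HdfsFileStatusProto status)
      throws com.google.protobuf.InvalidProtocolBufferException {
    DirectoryListingProto listing = DirectoryListingProto.newBuilder()
        .addPartialListing(status)   // repeated .HdfsFileStatusProto partialListing = 1
        .setRemainingEntries(0)      // required uint32 remainingEntries = 2
        .build();                    // would throw if remainingEntries were left unset
    // Serialize to the protobuf wire format and parse it back.
    return DirectoryListingProto.parseFrom(listing.toByteArray());
  }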
  
  public interface UpgradeStatusReportProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint32 version = 1;
    boolean hasVersion();
    int getVersion();
    
    // required uint32 upgradeStatus = 2;
    boolean hasUpgradeStatus();
    int getUpgradeStatus();
    
    // required bool finalized = 3;
    boolean hasFinalized();
    boolean getFinalized();
  }
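
  // Editor's note: reconstructed for reference from the field comments in the
  // interface above; the hdfs.proto definition this interface mirrors is:
  //
  //   message UpgradeStatusReportProto {
  //     required uint32 version = 1;
  //     required uint32 upgradeStatus = 2;
  //     required bool finalized = 3;
  //   }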
  public static final class UpgradeStatusReportProto extends
      com.google.protobuf.GeneratedMessage
      implements UpgradeStatusReportProtoOrBuilder {
    // Use UpgradeStatusReportProto.newBuilder() to construct.
    private UpgradeStatusReportProto(Builder builder) {
      super(builder);
    }
    private UpgradeStatusReportProto(boolean noInit) {}
    
    private static final UpgradeStatusReportProto defaultInstance;
    public static UpgradeStatusReportProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public UpgradeStatusReportProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_UpgradeStatusReportProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_UpgradeStatusReportProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint32 version = 1;
    public static final int VERSION_FIELD_NUMBER = 1;
    private int version_;
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getVersion() {
      return version_;
    }
    
    // required uint32 upgradeStatus = 2;
    public static final int UPGRADESTATUS_FIELD_NUMBER = 2;
    private int upgradeStatus_;
    public boolean hasUpgradeStatus() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public int getUpgradeStatus() {
      return upgradeStatus_;
    }
    
    // required bool finalized = 3;
    public static final int FINALIZED_FIELD_NUMBER = 3;
    private boolean finalized_;
    public boolean hasFinalized() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public boolean getFinalized() {
      return finalized_;
    }
    
    private void initFields() {
      version_ = 0;
      upgradeStatus_ = 0;
      finalized_ = false;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasUpgradeStatus()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasFinalized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, version_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, upgradeStatus_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, finalized_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, version_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, upgradeStatus_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, finalized_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto) obj;
      
      boolean result = true;
      result = result && (hasVersion() == other.hasVersion());
      if (hasVersion()) {
        result = result && (getVersion()
            == other.getVersion());
      }
      result = result && (hasUpgradeStatus() == other.hasUpgradeStatus());
      if (hasUpgradeStatus()) {
        result = result && (getUpgradeStatus()
            == other.getUpgradeStatus());
      }
      result = result && (hasFinalized() == other.hasFinalized());
      if (hasFinalized()) {
        result = result && (getFinalized()
            == other.getFinalized());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasVersion()) {
        hash = (37 * hash) + VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getVersion();
      }
      if (hasUpgradeStatus()) {
        hash = (37 * hash) + UPGRADESTATUS_FIELD_NUMBER;
        hash = (53 * hash) + getUpgradeStatus();
      }
      if (hasFinalized()) {
        hash = (37 * hash) + FINALIZED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getFinalized());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_UpgradeStatusReportProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_UpgradeStatusReportProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        version_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        upgradeStatus_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        finalized_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.version_ = version_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.upgradeStatus_ = upgradeStatus_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.finalized_ = finalized_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto.getDefaultInstance()) return this;
        if (other.hasVersion()) {
          setVersion(other.getVersion());
        }
        if (other.hasUpgradeStatus()) {
          setUpgradeStatus(other.getUpgradeStatus());
        }
        if (other.hasFinalized()) {
          setFinalized(other.getFinalized());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasVersion()) {
          
          return false;
        }
        if (!hasUpgradeStatus()) {
          
          return false;
        }
        if (!hasFinalized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              version_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              upgradeStatus_ = input.readUInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              finalized_ = input.readBool();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint32 version = 1;
      private int version_ ;
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getVersion() {
        return version_;
      }
      public Builder setVersion(int value) {
        bitField0_ |= 0x00000001;
        version_ = value;
        onChanged();
        return this;
      }
      public Builder clearVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        version_ = 0;
        onChanged();
        return this;
      }
      
      // required uint32 upgradeStatus = 2;
      private int upgradeStatus_ ;
      public boolean hasUpgradeStatus() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public int getUpgradeStatus() {
        return upgradeStatus_;
      }
      public Builder setUpgradeStatus(int value) {
        bitField0_ |= 0x00000002;
        upgradeStatus_ = value;
        onChanged();
        return this;
      }
      public Builder clearUpgradeStatus() {
        bitField0_ = (bitField0_ & ~0x00000002);
        upgradeStatus_ = 0;
        onChanged();
        return this;
      }
      
      // required bool finalized = 3;
      private boolean finalized_ ;
      public boolean hasFinalized() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public boolean getFinalized() {
        return finalized_;
      }
      public Builder setFinalized(boolean value) {
        bitField0_ |= 0x00000004;
        finalized_ = value;
        onChanged();
        return this;
      }
      public Builder clearFinalized() {
        bitField0_ = (bitField0_ & ~0x00000004);
        finalized_ = false;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:UpgradeStatusReportProto)
    }
    
    static {
      defaultInstance = new UpgradeStatusReportProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:UpgradeStatusReportProto)
  }
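
  // Editor's note: illustrative usage sketch only, not emitted by protoc. All three
  // fields are required, so each setter is called before build(); the values are
  // arbitrary placeholders.
  private static UpgradeStatusReportProto exampleUpgradeStatusReport() {
    return UpgradeStatusReportProto.newBuilder()
        .setVersion(1)          // required uint32 version = 1
        .setUpgradeStatus(100)  // required uint32 upgradeStatus = 2
        .setFinalized(true)     // required bool finalized = 3
        .build();
  }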
  
  public interface StorageInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint32 layoutVersion = 1;
    boolean hasLayoutVersion();
    int getLayoutVersion();
    
    // required uint32 namespceID = 2;
    boolean hasNamespceID();
    int getNamespceID();
    
    // required string clusterID = 3;
    boolean hasClusterID();
    String getClusterID();
    
    // required uint64 cTime = 4;
    boolean hasCTime();
    long getCTime();
  }
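
  // Editor's note: reconstructed for reference from the field comments above (the
  // identifier "namespceID" is spelled exactly as generated); the hdfs.proto
  // definition this interface mirrors is:
  //
  //   message StorageInfoProto {
  //     required uint32 layoutVersion = 1;
  //     required uint32 namespceID = 2;
  //     required string clusterID = 3;
  //     required uint64 cTime = 4;
  //   }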
  public static final class StorageInfoProto extends
      com.google.protobuf.GeneratedMessage
      implements StorageInfoProtoOrBuilder {
    // Use StorageInfoProto.newBuilder() to construct.
    private StorageInfoProto(Builder builder) {
      super(builder);
    }
    private StorageInfoProto(boolean noInit) {}
    
    private static final StorageInfoProto defaultInstance;
    public static StorageInfoProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public StorageInfoProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_StorageInfoProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_StorageInfoProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint32 layoutVersion = 1;
    public static final int LAYOUTVERSION_FIELD_NUMBER = 1;
    private int layoutVersion_;
    public boolean hasLayoutVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getLayoutVersion() {
      return layoutVersion_;
    }
    
    // required uint32 namespceID = 2;
    public static final int NAMESPCEID_FIELD_NUMBER = 2;
    private int namespceID_;
    public boolean hasNamespceID() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public int getNamespceID() {
      return namespceID_;
    }
    
    // required string clusterID = 3;
    public static final int CLUSTERID_FIELD_NUMBER = 3;
    private java.lang.Object clusterID_;
    public boolean hasClusterID() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public String getClusterID() {
      java.lang.Object ref = clusterID_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          clusterID_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getClusterIDBytes() {
      java.lang.Object ref = clusterID_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        clusterID_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required uint64 cTime = 4;
    public static final int CTIME_FIELD_NUMBER = 4;
    private long cTime_;
    public boolean hasCTime() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public long getCTime() {
      return cTime_;
    }
    
    private void initFields() {
      layoutVersion_ = 0;
      namespceID_ = 0;
      clusterID_ = "";
      cTime_ = 0L;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasLayoutVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasNamespceID()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasClusterID()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasCTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, layoutVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, namespceID_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getClusterIDBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, cTime_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, layoutVersion_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, namespceID_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getClusterIDBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, cTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto) obj;
      
      boolean result = true;
      result = result && (hasLayoutVersion() == other.hasLayoutVersion());
      if (hasLayoutVersion()) {
        result = result && (getLayoutVersion()
            == other.getLayoutVersion());
      }
      result = result && (hasNamespceID() == other.hasNamespceID());
      if (hasNamespceID()) {
        result = result && (getNamespceID()
            == other.getNamespceID());
      }
      result = result && (hasClusterID() == other.hasClusterID());
      if (hasClusterID()) {
        result = result && getClusterID()
            .equals(other.getClusterID());
      }
      result = result && (hasCTime() == other.hasCTime());
      if (hasCTime()) {
        result = result && (getCTime()
            == other.getCTime());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLayoutVersion()) {
        hash = (37 * hash) + LAYOUTVERSION_FIELD_NUMBER;
        hash = (53 * hash) + getLayoutVersion();
      }
      if (hasNamespceID()) {
        hash = (37 * hash) + NAMESPCEID_FIELD_NUMBER;
        hash = (53 * hash) + getNamespceID();
      }
      if (hasClusterID()) {
        hash = (37 * hash) + CLUSTERID_FIELD_NUMBER;
        hash = (53 * hash) + getClusterID().hashCode();
      }
      if (hasCTime()) {
        hash = (37 * hash) + CTIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_StorageInfoProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_StorageInfoProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        layoutVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        namespceID_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        clusterID_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        cTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.layoutVersion_ = layoutVersion_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.namespceID_ = namespceID_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.clusterID_ = clusterID_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.cTime_ = cTime_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance()) return this;
        if (other.hasLayoutVersion()) {
          setLayoutVersion(other.getLayoutVersion());
        }
        if (other.hasNamespceID()) {
          setNamespceID(other.getNamespceID());
        }
        if (other.hasClusterID()) {
          setClusterID(other.getClusterID());
        }
        if (other.hasCTime()) {
          setCTime(other.getCTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasLayoutVersion()) {
          
          return false;
        }
        if (!hasNamespceID()) {
          
          return false;
        }
        if (!hasClusterID()) {
          
          return false;
        }
        if (!hasCTime()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              layoutVersion_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              namespceID_ = input.readUInt32();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              clusterID_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              cTime_ = input.readUInt64();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint32 layoutVersion = 1;
      private int layoutVersion_ ;
      public boolean hasLayoutVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getLayoutVersion() {
        return layoutVersion_;
      }
      public Builder setLayoutVersion(int value) {
        bitField0_ |= 0x00000001;
        layoutVersion_ = value;
        onChanged();
        return this;
      }
      public Builder clearLayoutVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        layoutVersion_ = 0;
        onChanged();
        return this;
      }
      
      // required uint32 namespceID = 2;
      private int namespceID_ ;
      public boolean hasNamespceID() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public int getNamespceID() {
        return namespceID_;
      }
      public Builder setNamespceID(int value) {
        bitField0_ |= 0x00000002;
        namespceID_ = value;
        onChanged();
        return this;
      }
      public Builder clearNamespceID() {
        bitField0_ = (bitField0_ & ~0x00000002);
        namespceID_ = 0;
        onChanged();
        return this;
      }
      
      // required string clusterID = 3;
      private java.lang.Object clusterID_ = "";
      public boolean hasClusterID() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public String getClusterID() {
        java.lang.Object ref = clusterID_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          clusterID_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setClusterID(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        clusterID_ = value;
        onChanged();
        return this;
      }
      public Builder clearClusterID() {
        bitField0_ = (bitField0_ & ~0x00000004);
        clusterID_ = getDefaultInstance().getClusterID();
        onChanged();
        return this;
      }
      void setClusterID(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000004;
        clusterID_ = value;
        onChanged();
      }
      
      // required uint64 cTime = 4;
      private long cTime_ ;
      public boolean hasCTime() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public long getCTime() {
        return cTime_;
      }
      public Builder setCTime(long value) {
        bitField0_ |= 0x00000008;
        cTime_ = value;
        onChanged();
        return this;
      }
      public Builder clearCTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        cTime_ = 0L;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:StorageInfoProto)
    }
    
    static {
      defaultInstance = new StorageInfoProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:StorageInfoProto)
  }
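
  // Illustrative sketch, not part of the protoc-generated output: a minimal
  // round trip through the StorageInfoProto builder and parseFrom(...) defined
  // above. The field values and the helper name are placeholders chosen only
  // for demonstration.
  private static StorageInfoProto roundTripStorageInfoExample()
      throws com.google.protobuf.InvalidProtocolBufferException {
    StorageInfoProto info = StorageInfoProto.newBuilder()
        .setLayoutVersion(40)          // required uint32 layoutVersion = 1
        .setNamespceID(12345)          // required uint32 namespceID = 2
        .setClusterID("CID-example")   // required string clusterID = 3
        .setCTime(0L)                  // required uint64 cTime = 4
        .build();                      // build() fails if any required field is unset
    // Serialize and parse back; all four required fields survive the round trip.
    return StorageInfoProto.parseFrom(info.toByteArray());
  }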
  
  public interface NamenodeRegistrationProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string rpcAddress = 1;
    boolean hasRpcAddress();
    String getRpcAddress();
    
    // required string httpAddress = 2;
    boolean hasHttpAddress();
    String getHttpAddress();
    
    // required .StorageInfoProto storageInfo = 3;
    boolean hasStorageInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder();
    
    // optional .NamenodeRegistrationProto.NamenodeRoleProto role = 4 [default = NAMENODE];
    boolean hasRole();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto getRole();
  }
  public static final class NamenodeRegistrationProto extends
      com.google.protobuf.GeneratedMessage
      implements NamenodeRegistrationProtoOrBuilder {
    // Use NamenodeRegistrationProto.newBuilder() to construct.
    private NamenodeRegistrationProto(Builder builder) {
      super(builder);
    }
    private NamenodeRegistrationProto(boolean noInit) {}
    
    private static final NamenodeRegistrationProto defaultInstance;
    public static NamenodeRegistrationProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public NamenodeRegistrationProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeRegistrationProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeRegistrationProto_fieldAccessorTable;
    }
    
    public enum NamenodeRoleProto
        implements com.google.protobuf.ProtocolMessageEnum {
      NAMENODE(0, 1),
      BACKUP(1, 2),
      CHECKPOINT(2, 3),
      ;
      
      public static final int NAMENODE_VALUE = 1;
      public static final int BACKUP_VALUE = 2;
      public static final int CHECKPOINT_VALUE = 3;
      
      
      public final int getNumber() { return value; }
      
      public static NamenodeRoleProto valueOf(int value) {
        switch (value) {
          case 1: return NAMENODE;
          case 2: return BACKUP;
          case 3: return CHECKPOINT;
          default: return null;
        }
      }
      
      public static com.google.protobuf.Internal.EnumLiteMap<NamenodeRoleProto>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<NamenodeRoleProto>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<NamenodeRoleProto>() {
              public NamenodeRoleProto findValueByNumber(int number) {
                return NamenodeRoleProto.valueOf(number);
              }
            };
      
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.getDescriptor().getEnumTypes().get(0);
      }
      
      private static final NamenodeRoleProto[] VALUES = {
        NAMENODE, BACKUP, CHECKPOINT, 
      };
      
      public static NamenodeRoleProto valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }
      
      private final int index;
      private final int value;
      
      private NamenodeRoleProto(int index, int value) {
        this.index = index;
        this.value = value;
      }
      
      // @@protoc_insertion_point(enum_scope:NamenodeRegistrationProto.NamenodeRoleProto)
    }
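
    // Illustrative sketch, not part of the generated output: NamenodeRoleProto
    // wire numbers start at 1, so valueOf(0) and any unrecognized number return
    // null; the parsing loop further below keeps such values in the message's
    // unknown fields. The helper name and the fallback choice are assumptions
    // for demonstration, mirroring the declared default of NAMENODE.
    private static NamenodeRoleProto roleForNumberOrDefault(int rawValue) {
      NamenodeRoleProto role = NamenodeRoleProto.valueOf(rawValue);
      return (role != null) ? role : NamenodeRoleProto.NAMENODE;
    }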
    
    private int bitField0_;
    // required string rpcAddress = 1;
    public static final int RPCADDRESS_FIELD_NUMBER = 1;
    private java.lang.Object rpcAddress_;
    public boolean hasRpcAddress() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getRpcAddress() {
      java.lang.Object ref = rpcAddress_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          rpcAddress_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getRpcAddressBytes() {
      java.lang.Object ref = rpcAddress_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        rpcAddress_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required string httpAddress = 2;
    public static final int HTTPADDRESS_FIELD_NUMBER = 2;
    private java.lang.Object httpAddress_;
    public boolean hasHttpAddress() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public String getHttpAddress() {
      java.lang.Object ref = httpAddress_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          httpAddress_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getHttpAddressBytes() {
      java.lang.Object ref = httpAddress_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        httpAddress_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required .StorageInfoProto storageInfo = 3;
    public static final int STORAGEINFO_FIELD_NUMBER = 3;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto storageInfo_;
    public boolean hasStorageInfo() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo() {
      return storageInfo_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder() {
      return storageInfo_;
    }
    
    // optional .NamenodeRegistrationProto.NamenodeRoleProto role = 4 [default = NAMENODE];
    public static final int ROLE_FIELD_NUMBER = 4;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto role_;
    public boolean hasRole() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto getRole() {
      return role_;
    }
    
    private void initFields() {
      rpcAddress_ = "";
      httpAddress_ = "";
      storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
      role_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto.NAMENODE;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasRpcAddress()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasHttpAddress()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStorageInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getStorageInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getRpcAddressBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getHttpAddressBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, storageInfo_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeEnum(4, role_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getRpcAddressBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getHttpAddressBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, storageInfo_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(4, role_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto) obj;
      
      boolean result = true;
      result = result && (hasRpcAddress() == other.hasRpcAddress());
      if (hasRpcAddress()) {
        result = result && getRpcAddress()
            .equals(other.getRpcAddress());
      }
      result = result && (hasHttpAddress() == other.hasHttpAddress());
      if (hasHttpAddress()) {
        result = result && getHttpAddress()
            .equals(other.getHttpAddress());
      }
      result = result && (hasStorageInfo() == other.hasStorageInfo());
      if (hasStorageInfo()) {
        result = result && getStorageInfo()
            .equals(other.getStorageInfo());
      }
      result = result && (hasRole() == other.hasRole());
      if (hasRole()) {
        result = result &&
            (getRole() == other.getRole());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRpcAddress()) {
        hash = (37 * hash) + RPCADDRESS_FIELD_NUMBER;
        hash = (53 * hash) + getRpcAddress().hashCode();
      }
      if (hasHttpAddress()) {
        hash = (37 * hash) + HTTPADDRESS_FIELD_NUMBER;
        hash = (53 * hash) + getHttpAddress().hashCode();
      }
      if (hasStorageInfo()) {
        hash = (37 * hash) + STORAGEINFO_FIELD_NUMBER;
        hash = (53 * hash) + getStorageInfo().hashCode();
      }
      if (hasRole()) {
        hash = (37 * hash) + ROLE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getRole());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeRegistrationProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeRegistrationProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getStorageInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        rpcAddress_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        httpAddress_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        if (storageInfoBuilder_ == null) {
          storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
        } else {
          storageInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        role_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto.NAMENODE;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.rpcAddress_ = rpcAddress_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.httpAddress_ = httpAddress_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (storageInfoBuilder_ == null) {
          result.storageInfo_ = storageInfo_;
        } else {
          result.storageInfo_ = storageInfoBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.role_ = role_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.getDefaultInstance()) return this;
        if (other.hasRpcAddress()) {
          setRpcAddress(other.getRpcAddress());
        }
        if (other.hasHttpAddress()) {
          setHttpAddress(other.getHttpAddress());
        }
        if (other.hasStorageInfo()) {
          mergeStorageInfo(other.getStorageInfo());
        }
        if (other.hasRole()) {
          setRole(other.getRole());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasRpcAddress()) {
          
          return false;
        }
        if (!hasHttpAddress()) {
          
          return false;
        }
        if (!hasStorageInfo()) {
          
          return false;
        }
        if (!getStorageInfo().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              rpcAddress_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              httpAddress_ = input.readBytes();
              break;
            }
            case 26: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.newBuilder();
              if (hasStorageInfo()) {
                subBuilder.mergeFrom(getStorageInfo());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setStorageInfo(subBuilder.buildPartial());
              break;
            }
            case 32: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto value = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(4, rawValue);
              } else {
                bitField0_ |= 0x00000008;
                role_ = value;
              }
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required string rpcAddress = 1;
      private java.lang.Object rpcAddress_ = "";
      public boolean hasRpcAddress() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getRpcAddress() {
        java.lang.Object ref = rpcAddress_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          rpcAddress_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setRpcAddress(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        rpcAddress_ = value;
        onChanged();
        return this;
      }
      public Builder clearRpcAddress() {
        bitField0_ = (bitField0_ & ~0x00000001);
        rpcAddress_ = getDefaultInstance().getRpcAddress();
        onChanged();
        return this;
      }
      void setRpcAddress(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        rpcAddress_ = value;
        onChanged();
      }
      
      // required string httpAddress = 2;
      private java.lang.Object httpAddress_ = "";
      public boolean hasHttpAddress() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public String getHttpAddress() {
        java.lang.Object ref = httpAddress_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          httpAddress_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setHttpAddress(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        httpAddress_ = value;
        onChanged();
        return this;
      }
      public Builder clearHttpAddress() {
        bitField0_ = (bitField0_ & ~0x00000002);
        httpAddress_ = getDefaultInstance().getHttpAddress();
        onChanged();
        return this;
      }
      void setHttpAddress(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        httpAddress_ = value;
        onChanged();
      }
      
      // required .StorageInfoProto storageInfo = 3;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder> storageInfoBuilder_;
      public boolean hasStorageInfo() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo() {
        if (storageInfoBuilder_ == null) {
          return storageInfo_;
        } else {
          return storageInfoBuilder_.getMessage();
        }
      }
      public Builder setStorageInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto value) {
        if (storageInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          storageInfo_ = value;
          onChanged();
        } else {
          storageInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      public Builder setStorageInfo(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder builderForValue) {
        if (storageInfoBuilder_ == null) {
          storageInfo_ = builderForValue.build();
          onChanged();
        } else {
          storageInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      public Builder mergeStorageInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto value) {
        if (storageInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              storageInfo_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance()) {
            storageInfo_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.newBuilder(storageInfo_).mergeFrom(value).buildPartial();
          } else {
            storageInfo_ = value;
          }
          onChanged();
        } else {
          storageInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      public Builder clearStorageInfo() {
        if (storageInfoBuilder_ == null) {
          storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
          onChanged();
        } else {
          storageInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder getStorageInfoBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getStorageInfoFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder() {
        if (storageInfoBuilder_ != null) {
          return storageInfoBuilder_.getMessageOrBuilder();
        } else {
          return storageInfo_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder> 
          getStorageInfoFieldBuilder() {
        if (storageInfoBuilder_ == null) {
          storageInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder>(
                  storageInfo_,
                  getParentForChildren(),
                  isClean());
          storageInfo_ = null;
        }
        return storageInfoBuilder_;
      }
      
      // optional .NamenodeRegistrationProto.NamenodeRoleProto role = 4 [default = NAMENODE];
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto role_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto.NAMENODE;
      public boolean hasRole() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto getRole() {
        return role_;
      }
      public Builder setRole(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        role_ = value;
        onChanged();
        return this;
      }
      public Builder clearRole() {
        bitField0_ = (bitField0_ & ~0x00000008);
        role_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto.NAMENODE;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:NamenodeRegistrationProto)
    }
    
    static {
      defaultInstance = new NamenodeRegistrationProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:NamenodeRegistrationProto)
  }
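
  // Illustrative sketch, not part of the generated output: constructing a
  // NamenodeRegistrationProto with its required nested StorageInfoProto and the
  // optional role. The addresses, storage values, and helper name are
  // placeholder assumptions used only for demonstration.
  private static NamenodeRegistrationProto buildExampleRegistration() {
    StorageInfoProto storage = StorageInfoProto.newBuilder()
        .setLayoutVersion(40)
        .setNamespceID(12345)
        .setClusterID("CID-example")
        .setCTime(0L)
        .build();
    return NamenodeRegistrationProto.newBuilder()
        .setRpcAddress("host1:8020")      // required string rpcAddress = 1
        .setHttpAddress("host1:50070")    // required string httpAddress = 2
        .setStorageInfo(storage)          // required .StorageInfoProto storageInfo = 3
        .setRole(NamenodeRegistrationProto.NamenodeRoleProto.BACKUP) // optional, defaults to NAMENODE
        .build();
  }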
  
  public interface CheckpointSignatureProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string blockPoolId = 1;
    boolean hasBlockPoolId();
    String getBlockPoolId();
    
    // required uint64 mostRecentCheckpointTxId = 2;
    boolean hasMostRecentCheckpointTxId();
    long getMostRecentCheckpointTxId();
    
    // required uint64 curSegmentTxId = 3;
    boolean hasCurSegmentTxId();
    long getCurSegmentTxId();
    
    // required .StorageInfoProto storageInfo = 4;
    boolean hasStorageInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder();
  }
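
  // Illustrative sketch, not part of the generated output: populating a
  // CheckpointSignatureProto through the builder declared below. The block pool
  // id, transaction ids, and helper name are placeholder assumptions.
  private static CheckpointSignatureProto buildExampleCheckpointSignature(
      StorageInfoProto storage) {
    return CheckpointSignatureProto.newBuilder()
        .setBlockPoolId("BP-example-pool")   // required string blockPoolId = 1
        .setMostRecentCheckpointTxId(100L)   // required uint64 mostRecentCheckpointTxId = 2
        .setCurSegmentTxId(101L)             // required uint64 curSegmentTxId = 3
        .setStorageInfo(storage)             // required .StorageInfoProto storageInfo = 4
        .build();
  }
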
  public static final class CheckpointSignatureProto extends
      com.google.protobuf.GeneratedMessage
      implements CheckpointSignatureProtoOrBuilder {
    // Use CheckpointSignatureProto.newBuilder() to construct.
    private CheckpointSignatureProto(Builder builder) {
      super(builder);
    }
    private CheckpointSignatureProto(boolean noInit) {}
    
    private static final CheckpointSignatureProto defaultInstance;
    public static CheckpointSignatureProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public CheckpointSignatureProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointSignatureProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointSignatureProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required string blockPoolId = 1;
    public static final int BLOCKPOOLID_FIELD_NUMBER = 1;
    private java.lang.Object blockPoolId_;
    public boolean hasBlockPoolId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getBlockPoolId() {
      java.lang.Object ref = blockPoolId_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          blockPoolId_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getBlockPoolIdBytes() {
      java.lang.Object ref = blockPoolId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        blockPoolId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required uint64 mostRecentCheckpointTxId = 2;
    public static final int MOSTRECENTCHECKPOINTTXID_FIELD_NUMBER = 2;
    private long mostRecentCheckpointTxId_;
    public boolean hasMostRecentCheckpointTxId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getMostRecentCheckpointTxId() {
      return mostRecentCheckpointTxId_;
    }
    
    // required uint64 curSegmentTxId = 3;
    public static final int CURSEGMENTTXID_FIELD_NUMBER = 3;
    private long curSegmentTxId_;
    public boolean hasCurSegmentTxId() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getCurSegmentTxId() {
      return curSegmentTxId_;
    }
    
    // required .StorageInfoProto storageInfo = 4;
    public static final int STORAGEINFO_FIELD_NUMBER = 4;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto storageInfo_;
    public boolean hasStorageInfo() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo() {
      return storageInfo_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder() {
      return storageInfo_;
    }
    
    private void initFields() {
      blockPoolId_ = "";
      mostRecentCheckpointTxId_ = 0L;
      curSegmentTxId_ = 0L;
      storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasBlockPoolId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasMostRecentCheckpointTxId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasCurSegmentTxId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStorageInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getStorageInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getBlockPoolIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, mostRecentCheckpointTxId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, curSegmentTxId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, storageInfo_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getBlockPoolIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, mostRecentCheckpointTxId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, curSegmentTxId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, storageInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto) obj;
      
      boolean result = true;
      result = result && (hasBlockPoolId() == other.hasBlockPoolId());
      if (hasBlockPoolId()) {
        result = result && getBlockPoolId()
            .equals(other.getBlockPoolId());
      }
      result = result && (hasMostRecentCheckpointTxId() == other.hasMostRecentCheckpointTxId());
      if (hasMostRecentCheckpointTxId()) {
        result = result && (getMostRecentCheckpointTxId()
            == other.getMostRecentCheckpointTxId());
      }
      result = result && (hasCurSegmentTxId() == other.hasCurSegmentTxId());
      if (hasCurSegmentTxId()) {
        result = result && (getCurSegmentTxId()
            == other.getCurSegmentTxId());
      }
      result = result && (hasStorageInfo() == other.hasStorageInfo());
      if (hasStorageInfo()) {
        result = result && getStorageInfo()
            .equals(other.getStorageInfo());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasBlockPoolId()) {
        hash = (37 * hash) + BLOCKPOOLID_FIELD_NUMBER;
        hash = (53 * hash) + getBlockPoolId().hashCode();
      }
      if (hasMostRecentCheckpointTxId()) {
        hash = (37 * hash) + MOSTRECENTCHECKPOINTTXID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getMostRecentCheckpointTxId());
      }
      if (hasCurSegmentTxId()) {
        hash = (37 * hash) + CURSEGMENTTXID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCurSegmentTxId());
      }
      if (hasStorageInfo()) {
        hash = (37 * hash) + STORAGEINFO_FIELD_NUMBER;
        hash = (53 * hash) + getStorageInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointSignatureProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointSignatureProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getStorageInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        blockPoolId_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        mostRecentCheckpointTxId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        curSegmentTxId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        if (storageInfoBuilder_ == null) {
          storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
        } else {
          storageInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.blockPoolId_ = blockPoolId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.mostRecentCheckpointTxId_ = mostRecentCheckpointTxId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.curSegmentTxId_ = curSegmentTxId_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (storageInfoBuilder_ == null) {
          result.storageInfo_ = storageInfo_;
        } else {
          result.storageInfo_ = storageInfoBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDefaultInstance()) return this;
        if (other.hasBlockPoolId()) {
          setBlockPoolId(other.getBlockPoolId());
        }
        if (other.hasMostRecentCheckpointTxId()) {
          setMostRecentCheckpointTxId(other.getMostRecentCheckpointTxId());
        }
        if (other.hasCurSegmentTxId()) {
          setCurSegmentTxId(other.getCurSegmentTxId());
        }
        if (other.hasStorageInfo()) {
          mergeStorageInfo(other.getStorageInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasBlockPoolId()) {
          
          return false;
        }
        if (!hasMostRecentCheckpointTxId()) {
          
          return false;
        }
        if (!hasCurSegmentTxId()) {
          
          return false;
        }
        if (!hasStorageInfo()) {
          
          return false;
        }
        if (!getStorageInfo().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              blockPoolId_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              mostRecentCheckpointTxId_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              curSegmentTxId_ = input.readUInt64();
              break;
            }
            case 34: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.newBuilder();
              if (hasStorageInfo()) {
                subBuilder.mergeFrom(getStorageInfo());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setStorageInfo(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required string blockPoolId = 1;
      private java.lang.Object blockPoolId_ = "";
      public boolean hasBlockPoolId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getBlockPoolId() {
        java.lang.Object ref = blockPoolId_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          blockPoolId_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setBlockPoolId(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        blockPoolId_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockPoolId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        blockPoolId_ = getDefaultInstance().getBlockPoolId();
        onChanged();
        return this;
      }
      void setBlockPoolId(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        blockPoolId_ = value;
        onChanged();
      }
      
      // required uint64 mostRecentCheckpointTxId = 2;
      private long mostRecentCheckpointTxId_ ;
      public boolean hasMostRecentCheckpointTxId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getMostRecentCheckpointTxId() {
        return mostRecentCheckpointTxId_;
      }
      public Builder setMostRecentCheckpointTxId(long value) {
        bitField0_ |= 0x00000002;
        mostRecentCheckpointTxId_ = value;
        onChanged();
        return this;
      }
      public Builder clearMostRecentCheckpointTxId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        mostRecentCheckpointTxId_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 curSegmentTxId = 3;
      private long curSegmentTxId_ ;
      public boolean hasCurSegmentTxId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getCurSegmentTxId() {
        return curSegmentTxId_;
      }
      public Builder setCurSegmentTxId(long value) {
        bitField0_ |= 0x00000004;
        curSegmentTxId_ = value;
        onChanged();
        return this;
      }
      public Builder clearCurSegmentTxId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        curSegmentTxId_ = 0L;
        onChanged();
        return this;
      }
      
      // required .StorageInfoProto storageInfo = 4;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder> storageInfoBuilder_;
      public boolean hasStorageInfo() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo() {
        if (storageInfoBuilder_ == null) {
          return storageInfo_;
        } else {
          return storageInfoBuilder_.getMessage();
        }
      }
      public Builder setStorageInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto value) {
        if (storageInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          storageInfo_ = value;
          onChanged();
        } else {
          storageInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder setStorageInfo(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder builderForValue) {
        if (storageInfoBuilder_ == null) {
          storageInfo_ = builderForValue.build();
          onChanged();
        } else {
          storageInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder mergeStorageInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto value) {
        if (storageInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              storageInfo_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance()) {
            storageInfo_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.newBuilder(storageInfo_).mergeFrom(value).buildPartial();
          } else {
            storageInfo_ = value;
          }
          onChanged();
        } else {
          storageInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder clearStorageInfo() {
        if (storageInfoBuilder_ == null) {
          storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
          onChanged();
        } else {
          storageInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder getStorageInfoBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getStorageInfoFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder() {
        if (storageInfoBuilder_ != null) {
          return storageInfoBuilder_.getMessageOrBuilder();
        } else {
          return storageInfo_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder> 
          getStorageInfoFieldBuilder() {
        if (storageInfoBuilder_ == null) {
          storageInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder>(
                  storageInfo_,
                  getParentForChildren(),
                  isClean());
          storageInfo_ = null;
        }
        return storageInfoBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:CheckpointSignatureProto)
    }
    
    static {
      defaultInstance = new CheckpointSignatureProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:CheckpointSignatureProto)
  }
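  
  // --- Illustrative usage sketch (editor addition, not protoc output) -----
  // Shows how a caller might assemble a CheckpointSignatureProto with the
  // Builder generated above. The block pool id and transaction ids are
  // placeholder values, and the required StorageInfoProto is assumed to be
  // built elsewhere and passed in, so no StorageInfoProto setters are guessed.
  private static CheckpointSignatureProto buildExampleCheckpointSignature(
      StorageInfoProto storageInfo) {
    return CheckpointSignatureProto.newBuilder()
        .setBlockPoolId("BP-example")          // hypothetical block pool id
        .setMostRecentCheckpointTxId(100L)     // placeholder transaction id
        .setCurSegmentTxId(101L)               // placeholder transaction id
        .setStorageInfo(storageInfo)           // required sub-message
        .build();                              // throws if a required field is unset
  }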
  
  public interface NamenodeCommandProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint32 action = 1;
    boolean hasAction();
    int getAction();
    
    // required .NamenodeCommandProto.Type type = 2;
    boolean hasType();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type getType();
    
    // optional .CheckpointCommandProto checkpointCmd = 3;
    boolean hasCheckpointCmd();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto getCheckpointCmd();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProtoOrBuilder getCheckpointCmdOrBuilder();
  }
  public static final class NamenodeCommandProto extends
      com.google.protobuf.GeneratedMessage
      implements NamenodeCommandProtoOrBuilder {
    // Use NamenodeCommandProto.newBuilder() to construct.
    private NamenodeCommandProto(Builder builder) {
      super(builder);
    }
    private NamenodeCommandProto(boolean noInit) {}
    
    private static final NamenodeCommandProto defaultInstance;
    public static NamenodeCommandProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public NamenodeCommandProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeCommandProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeCommandProto_fieldAccessorTable;
    }
    
    public enum Type
        implements com.google.protobuf.ProtocolMessageEnum {
      NamenodeCommand(0, 0),
      CheckPointCommand(1, 1),
      ;
      
      public static final int NamenodeCommand_VALUE = 0;
      public static final int CheckPointCommand_VALUE = 1;
      
      
      public final int getNumber() { return value; }
      
      public static Type valueOf(int value) {
        switch (value) {
          case 0: return NamenodeCommand;
          case 1: return CheckPointCommand;
          default: return null;
        }
      }
      
      public static com.google.protobuf.Internal.EnumLiteMap
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap() {
              public Type findValueByNumber(int number) {
                return Type.valueOf(number);
              }
            };
      
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.getDescriptor().getEnumTypes().get(0);
      }
      
      private static final Type[] VALUES = {
        NamenodeCommand, CheckPointCommand, 
      };
      
      public static Type valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }
      
      private final int index;
      private final int value;
      
      private Type(int index, int value) {
        this.index = index;
        this.value = value;
      }
      
      // @@protoc_insertion_point(enum_scope:NamenodeCommandProto.Type)
    }
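    
    // --- Illustrative sketch (editor addition, not protoc output) ---------
    // Type.valueOf(int) returns null for wire numbers this enum does not
    // define, which is why the Builder's mergeFrom() routes such values into
    // the unknown-field set. A caller resolving a raw number directly might
    // fall back to NamenodeCommand as below; that default is an assumption of
    // this sketch, not part of the protocol.
    private static Type resolveTypeOrDefault(int rawValue) {
      Type resolved = Type.valueOf(rawValue);
      return (resolved != null) ? resolved : Type.NamenodeCommand;
    }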
    
    private int bitField0_;
    // required uint32 action = 1;
    public static final int ACTION_FIELD_NUMBER = 1;
    private int action_;
    public boolean hasAction() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getAction() {
      return action_;
    }
    
    // required .NamenodeCommandProto.Type type = 2;
    public static final int TYPE_FIELD_NUMBER = 2;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type type_;
    public boolean hasType() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type getType() {
      return type_;
    }
    
    // optional .CheckpointCommandProto checkpointCmd = 3;
    public static final int CHECKPOINTCMD_FIELD_NUMBER = 3;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto checkpointCmd_;
    public boolean hasCheckpointCmd() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto getCheckpointCmd() {
      return checkpointCmd_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProtoOrBuilder getCheckpointCmdOrBuilder() {
      return checkpointCmd_;
    }
    
    private void initFields() {
      action_ = 0;
      type_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type.NamenodeCommand;
      checkpointCmd_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasAction()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasCheckpointCmd()) {
        if (!getCheckpointCmd().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, action_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, type_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, checkpointCmd_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, action_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, type_.getNumber());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, checkpointCmd_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto) obj;
      
      boolean result = true;
      result = result && (hasAction() == other.hasAction());
      if (hasAction()) {
        result = result && (getAction()
            == other.getAction());
      }
      result = result && (hasType() == other.hasType());
      if (hasType()) {
        result = result &&
            (getType() == other.getType());
      }
      result = result && (hasCheckpointCmd() == other.hasCheckpointCmd());
      if (hasCheckpointCmd()) {
        result = result && getCheckpointCmd()
            .equals(other.getCheckpointCmd());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasAction()) {
        hash = (37 * hash) + ACTION_FIELD_NUMBER;
        hash = (53 * hash) + getAction();
      }
      if (hasType()) {
        hash = (37 * hash) + TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getType());
      }
      if (hasCheckpointCmd()) {
        hash = (37 * hash) + CHECKPOINTCMD_FIELD_NUMBER;
        hash = (53 * hash) + getCheckpointCmd().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeCommandProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamenodeCommandProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCheckpointCmdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        action_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        type_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type.NamenodeCommand;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (checkpointCmdBuilder_ == null) {
          checkpointCmd_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDefaultInstance();
        } else {
          checkpointCmdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.action_ = action_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.type_ = type_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (checkpointCmdBuilder_ == null) {
          result.checkpointCmd_ = checkpointCmd_;
        } else {
          result.checkpointCmd_ = checkpointCmdBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.getDefaultInstance()) return this;
        if (other.hasAction()) {
          setAction(other.getAction());
        }
        if (other.hasType()) {
          setType(other.getType());
        }
        if (other.hasCheckpointCmd()) {
          mergeCheckpointCmd(other.getCheckpointCmd());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasAction()) {
          
          return false;
        }
        if (!hasType()) {
          
          return false;
        }
        if (hasCheckpointCmd()) {
          if (!getCheckpointCmd().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              action_ = input.readUInt32();
              break;
            }
            case 16: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type value = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                type_ = value;
              }
              break;
            }
            case 26: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.newBuilder();
              if (hasCheckpointCmd()) {
                subBuilder.mergeFrom(getCheckpointCmd());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setCheckpointCmd(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint32 action = 1;
      private int action_ ;
      public boolean hasAction() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getAction() {
        return action_;
      }
      public Builder setAction(int value) {
        bitField0_ |= 0x00000001;
        action_ = value;
        onChanged();
        return this;
      }
      public Builder clearAction() {
        bitField0_ = (bitField0_ & ~0x00000001);
        action_ = 0;
        onChanged();
        return this;
      }
      
      // required .NamenodeCommandProto.Type type = 2;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type type_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type.NamenodeCommand;
      public boolean hasType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type getType() {
        return type_;
      }
      public Builder setType(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        type_ = value;
        onChanged();
        return this;
      }
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        type_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Type.NamenodeCommand;
        onChanged();
        return this;
      }
      
      // optional .CheckpointCommandProto checkpointCmd = 3;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto checkpointCmd_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProtoOrBuilder> checkpointCmdBuilder_;
      public boolean hasCheckpointCmd() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto getCheckpointCmd() {
        if (checkpointCmdBuilder_ == null) {
          return checkpointCmd_;
        } else {
          return checkpointCmdBuilder_.getMessage();
        }
      }
      public Builder setCheckpointCmd(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto value) {
        if (checkpointCmdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          checkpointCmd_ = value;
          onChanged();
        } else {
          checkpointCmdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      public Builder setCheckpointCmd(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.Builder builderForValue) {
        if (checkpointCmdBuilder_ == null) {
          checkpointCmd_ = builderForValue.build();
          onChanged();
        } else {
          checkpointCmdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      public Builder mergeCheckpointCmd(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto value) {
        if (checkpointCmdBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              checkpointCmd_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDefaultInstance()) {
            checkpointCmd_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.newBuilder(checkpointCmd_).mergeFrom(value).buildPartial();
          } else {
            checkpointCmd_ = value;
          }
          onChanged();
        } else {
          checkpointCmdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      public Builder clearCheckpointCmd() {
        if (checkpointCmdBuilder_ == null) {
          checkpointCmd_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDefaultInstance();
          onChanged();
        } else {
          checkpointCmdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.Builder getCheckpointCmdBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getCheckpointCmdFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProtoOrBuilder getCheckpointCmdOrBuilder() {
        if (checkpointCmdBuilder_ != null) {
          return checkpointCmdBuilder_.getMessageOrBuilder();
        } else {
          return checkpointCmd_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProtoOrBuilder> 
          getCheckpointCmdFieldBuilder() {
        if (checkpointCmdBuilder_ == null) {
          checkpointCmdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProtoOrBuilder>(
                  checkpointCmd_,
                  getParentForChildren(),
                  isClean());
          checkpointCmd_ = null;
        }
        return checkpointCmdBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:NamenodeCommandProto)
    }
    
    static {
      defaultInstance = new NamenodeCommandProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:NamenodeCommandProto)
  }
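  
  // --- Illustrative usage sketch (editor addition, not protoc output) -----
  // How a consumer might dispatch on a parsed NamenodeCommandProto. The
  // checkpointCmd field is optional, so it is read only when both the type
  // and its has-bit indicate a checkpoint command; returning null otherwise
  // is a convention assumed for this sketch.
  private static CheckpointCommandProto extractCheckpointCommand(
      NamenodeCommandProto command) {
    if (command.getType() == NamenodeCommandProto.Type.CheckPointCommand
        && command.hasCheckpointCmd()) {
      return command.getCheckpointCmd();
    }
    return null;
  }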
  
  public interface CheckpointCommandProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .CheckpointSignatureProto signature = 1;
    boolean hasSignature();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto getSignature();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProtoOrBuilder getSignatureOrBuilder();
    
    // required bool needToReturnImage = 2;
    boolean hasNeedToReturnImage();
    boolean getNeedToReturnImage();
  }
  public static final class CheckpointCommandProto extends
      com.google.protobuf.GeneratedMessage
      implements CheckpointCommandProtoOrBuilder {
    // Use CheckpointCommandProto.newBuilder() to construct.
    private CheckpointCommandProto(Builder builder) {
      super(builder);
    }
    private CheckpointCommandProto(boolean noInit) {}
    
    private static final CheckpointCommandProto defaultInstance;
    public static CheckpointCommandProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public CheckpointCommandProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointCommandProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointCommandProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .CheckpointSignatureProto signature = 1;
    public static final int SIGNATURE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto signature_;
    public boolean hasSignature() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto getSignature() {
      return signature_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProtoOrBuilder getSignatureOrBuilder() {
      return signature_;
    }
    
    // required bool needToReturnImage = 2;
    public static final int NEEDTORETURNIMAGE_FIELD_NUMBER = 2;
    private boolean needToReturnImage_;
    public boolean hasNeedToReturnImage() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public boolean getNeedToReturnImage() {
      return needToReturnImage_;
    }
    
    private void initFields() {
      signature_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDefaultInstance();
      needToReturnImage_ = false;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasSignature()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasNeedToReturnImage()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getSignature().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, signature_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, needToReturnImage_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, signature_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, needToReturnImage_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto) obj;
      
      boolean result = true;
      result = result && (hasSignature() == other.hasSignature());
      if (hasSignature()) {
        result = result && getSignature()
            .equals(other.getSignature());
      }
      result = result && (hasNeedToReturnImage() == other.hasNeedToReturnImage());
      if (hasNeedToReturnImage()) {
        result = result && (getNeedToReturnImage()
            == other.getNeedToReturnImage());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasSignature()) {
        hash = (37 * hash) + SIGNATURE_FIELD_NUMBER;
        hash = (53 * hash) + getSignature().hashCode();
      }
      if (hasNeedToReturnImage()) {
        hash = (37 * hash) + NEEDTORETURNIMAGE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getNeedToReturnImage());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointCommandProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_CheckpointCommandProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSignatureFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (signatureBuilder_ == null) {
          signature_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDefaultInstance();
        } else {
          signatureBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        needToReturnImage_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (signatureBuilder_ == null) {
          result.signature_ = signature_;
        } else {
          result.signature_ = signatureBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.needToReturnImage_ = needToReturnImage_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.getDefaultInstance()) return this;
        if (other.hasSignature()) {
          mergeSignature(other.getSignature());
        }
        if (other.hasNeedToReturnImage()) {
          setNeedToReturnImage(other.getNeedToReturnImage());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasSignature()) {
          
          return false;
        }
        if (!hasNeedToReturnImage()) {
          
          return false;
        }
        if (!getSignature().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.newBuilder();
              if (hasSignature()) {
                subBuilder.mergeFrom(getSignature());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setSignature(subBuilder.buildPartial());
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              needToReturnImage_ = input.readBool();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required .CheckpointSignatureProto signature = 1;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto signature_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProtoOrBuilder> signatureBuilder_;
      public boolean hasSignature() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto getSignature() {
        if (signatureBuilder_ == null) {
          return signature_;
        } else {
          return signatureBuilder_.getMessage();
        }
      }
      public Builder setSignature(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto value) {
        if (signatureBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          signature_ = value;
          onChanged();
        } else {
          signatureBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder setSignature(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.Builder builderForValue) {
        if (signatureBuilder_ == null) {
          signature_ = builderForValue.build();
          onChanged();
        } else {
          signatureBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder mergeSignature(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto value) {
        if (signatureBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              signature_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDefaultInstance()) {
            signature_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.newBuilder(signature_).mergeFrom(value).buildPartial();
          } else {
            signature_ = value;
          }
          onChanged();
        } else {
          signatureBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearSignature() {
        if (signatureBuilder_ == null) {
          signature_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.getDefaultInstance();
          onChanged();
        } else {
          signatureBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.Builder getSignatureBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getSignatureFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProtoOrBuilder getSignatureOrBuilder() {
        if (signatureBuilder_ != null) {
          return signatureBuilder_.getMessageOrBuilder();
        } else {
          return signature_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProtoOrBuilder> 
          getSignatureFieldBuilder() {
        if (signatureBuilder_ == null) {
          signatureBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProtoOrBuilder>(
                  signature_,
                  getParentForChildren(),
                  isClean());
          signature_ = null;
        }
        return signatureBuilder_;
      }
      
      // required bool needToReturnImage = 2;
      private boolean needToReturnImage_ ;
      public boolean hasNeedToReturnImage() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public boolean getNeedToReturnImage() {
        return needToReturnImage_;
      }
      public Builder setNeedToReturnImage(boolean value) {
        bitField0_ |= 0x00000002;
        needToReturnImage_ = value;
        onChanged();
        return this;
      }
      public Builder clearNeedToReturnImage() {
        bitField0_ = (bitField0_ & ~0x00000002);
        needToReturnImage_ = false;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:CheckpointCommandProto)
    }
    
    static {
      defaultInstance = new CheckpointCommandProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:CheckpointCommandProto)
  }
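  /*
   * Illustrative usage (editor's sketch, not part of the protoc output):
   * CheckpointCommandProto requires both of its fields, so a builder must
   * supply a fully-initialized signature and the needToReturnImage flag before
   * build() succeeds. "signature" below is assumed to be a
   * CheckpointSignatureProto populated elsewhere.
   *
   *   CheckpointCommandProto cmd = CheckpointCommandProto.newBuilder()
   *       .setSignature(signature)        // required message field
   *       .setNeedToReturnImage(true)     // required bool field
   *       .build();                       // throws if either field is missing
   */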
  
  public interface BlockProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint64 blockId = 1;
    boolean hasBlockId();
    long getBlockId();
    
    // required uint64 genStamp = 2;
    boolean hasGenStamp();
    long getGenStamp();
    
    // optional uint64 numBytes = 3 [default = 0];
    boolean hasNumBytes();
    long getNumBytes();
  }
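  /*
   * BlockProtoOrBuilder is the read-only view implemented by both BlockProto
   * and its Builder; code that only inspects blockId/genStamp/numBytes can
   * accept this interface and work with either. A minimal sketch (editor's
   * example, not generated code):
   *
   *   static long sizeOf(BlockProtoOrBuilder b) {
   *     // numBytes is optional with default 0, so check hasNumBytes() when an
   *     // explicit zero must be distinguished from "unset".
   *     return b.hasNumBytes() ? b.getNumBytes() : 0L;
   *   }
   */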
  public static final class BlockProto extends
      com.google.protobuf.GeneratedMessage
      implements BlockProtoOrBuilder {
    // Use BlockProto.newBuilder() to construct.
    private BlockProto(Builder builder) {
      super(builder);
    }
    private BlockProto(boolean noInit) {}
    
    private static final BlockProto defaultInstance;
    public static BlockProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public BlockProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint64 blockId = 1;
    public static final int BLOCKID_FIELD_NUMBER = 1;
    private long blockId_;
    public boolean hasBlockId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getBlockId() {
      return blockId_;
    }
    
    // required uint64 genStamp = 2;
    public static final int GENSTAMP_FIELD_NUMBER = 2;
    private long genStamp_;
    public boolean hasGenStamp() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getGenStamp() {
      return genStamp_;
    }
    
    // optional uint64 numBytes = 3 [default = 0];
    public static final int NUMBYTES_FIELD_NUMBER = 3;
    private long numBytes_;
    public boolean hasNumBytes() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getNumBytes() {
      return numBytes_;
    }
    
    private void initFields() {
      blockId_ = 0L;
      genStamp_ = 0L;
      numBytes_ = 0L;
    }
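    // Editor's note: memoizedIsInitialized caches the result of isInitialized():
    // -1 means "not yet computed", 0 means "a required field is missing", and 1
    // means "all required fields (blockId, genStamp) are present".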
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasBlockId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasGenStamp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, blockId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, genStamp_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, numBytes_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, blockId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, genStamp_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, numBytes_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto) obj;
      
      boolean result = true;
      result = result && (hasBlockId() == other.hasBlockId());
      if (hasBlockId()) {
        result = result && (getBlockId()
            == other.getBlockId());
      }
      result = result && (hasGenStamp() == other.hasGenStamp());
      if (hasGenStamp()) {
        result = result && (getGenStamp()
            == other.getGenStamp());
      }
      result = result && (hasNumBytes() == other.hasNumBytes());
      if (hasNumBytes()) {
        result = result && (getNumBytes()
            == other.getNumBytes());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasBlockId()) {
        hash = (37 * hash) + BLOCKID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBlockId());
      }
      if (hasGenStamp()) {
        hash = (37 * hash) + GENSTAMP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getGenStamp());
      }
      if (hasNumBytes()) {
        hash = (37 * hash) + NUMBYTES_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNumBytes());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        blockId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        genStamp_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        numBytes_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
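      // Editor's note: build() and buildParsed() differ only in how a missing
      // required field is reported: build() throws an unchecked
      // UninitializedMessageException, while buildParsed() (used by the static
      // parseFrom() helpers) converts it into a checked
      // InvalidProtocolBufferException suitable for rejecting malformed input.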
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.blockId_ = blockId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.genStamp_ = genStamp_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.numBytes_ = numBytes_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance()) return this;
        if (other.hasBlockId()) {
          setBlockId(other.getBlockId());
        }
        if (other.hasGenStamp()) {
          setGenStamp(other.getGenStamp());
        }
        if (other.hasNumBytes()) {
          setNumBytes(other.getNumBytes());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasBlockId()) {
          
          return false;
        }
        if (!hasGenStamp()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              blockId_ = input.readUInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              genStamp_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              numBytes_ = input.readUInt64();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint64 blockId = 1;
      private long blockId_ ;
      public boolean hasBlockId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getBlockId() {
        return blockId_;
      }
      public Builder setBlockId(long value) {
        bitField0_ |= 0x00000001;
        blockId_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        blockId_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 genStamp = 2;
      private long genStamp_ ;
      public boolean hasGenStamp() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getGenStamp() {
        return genStamp_;
      }
      public Builder setGenStamp(long value) {
        bitField0_ |= 0x00000002;
        genStamp_ = value;
        onChanged();
        return this;
      }
      public Builder clearGenStamp() {
        bitField0_ = (bitField0_ & ~0x00000002);
        genStamp_ = 0L;
        onChanged();
        return this;
      }
      
      // optional uint64 numBytes = 3 [default = 0];
      private long numBytes_ ;
      public boolean hasNumBytes() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getNumBytes() {
        return numBytes_;
      }
      public Builder setNumBytes(long value) {
        bitField0_ |= 0x00000004;
        numBytes_ = value;
        onChanged();
        return this;
      }
      public Builder clearNumBytes() {
        bitField0_ = (bitField0_ & ~0x00000004);
        numBytes_ = 0L;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:BlockProto)
    }
    
    static {
      defaultInstance = new BlockProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:BlockProto)
  }
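  /*
   * Illustrative round trip (editor's sketch, not part of the protoc output).
   * The numeric values are hypothetical; numBytes may be omitted because it is
   * optional with a default of 0.
   *
   *   BlockProto block = BlockProto.newBuilder()
   *       .setBlockId(1073741825L)      // required uint64
   *       .setGenStamp(1001L)           // required uint64
   *       .setNumBytes(134217728L)      // optional uint64 [default = 0]
   *       .build();
   *   byte[] wire = block.toByteArray();
   *   BlockProto parsed = BlockProto.parseFrom(wire);
   *   assert parsed.getBlockId() == block.getBlockId();
   */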
  
  public interface BlockWithLocationsProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .BlockProto block = 1;
    boolean hasBlock();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlock();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlockOrBuilder();
    
    // repeated string storageIDs = 2;
    java.util.List<String> getStorageIDsList();
    int getStorageIDsCount();
    String getStorageIDs(int index);
  }
  public static final class BlockWithLocationsProto extends
      com.google.protobuf.GeneratedMessage
      implements BlockWithLocationsProtoOrBuilder {
    // Use BlockWithLocationsProto.newBuilder() to construct.
    private BlockWithLocationsProto(Builder builder) {
      super(builder);
    }
    private BlockWithLocationsProto(boolean noInit) {}
    
    private static final BlockWithLocationsProto defaultInstance;
    public static BlockWithLocationsProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public BlockWithLocationsProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockWithLocationsProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockWithLocationsProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .BlockProto block = 1;
    public static final int BLOCK_FIELD_NUMBER = 1;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto block_;
    public boolean hasBlock() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlock() {
      return block_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlockOrBuilder() {
      return block_;
    }
    
    // repeated string storageIDs = 2;
    public static final int STORAGEIDS_FIELD_NUMBER = 2;
    private com.google.protobuf.LazyStringList storageIDs_;
    public java.util.List<String>
        getStorageIDsList() {
      return storageIDs_;
    }
    public int getStorageIDsCount() {
      return storageIDs_.size();
    }
    public String getStorageIDs(int index) {
      return storageIDs_.get(index);
    }
    
    private void initFields() {
      block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
      storageIDs_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasBlock()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getBlock().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, block_);
      }
      for (int i = 0; i < storageIDs_.size(); i++) {
        output.writeBytes(2, storageIDs_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, block_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < storageIDs_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(storageIDs_.getByteString(i));
        }
        size += dataSize;
        size += 1 * getStorageIDsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
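    // Editor's note: for the repeated string field (field number 2) the size
    // computed above is the payload bytes (dataSize) plus one tag byte per
    // element -- the "1 * getStorageIDsList().size()" term -- because the
    // field-2, length-delimited tag encodes as a single byte.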
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto) obj;
      
      boolean result = true;
      result = result && (hasBlock() == other.hasBlock());
      if (hasBlock()) {
        result = result && getBlock()
            .equals(other.getBlock());
      }
      result = result && getStorageIDsList()
          .equals(other.getStorageIDsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasBlock()) {
        hash = (37 * hash) + BLOCK_FIELD_NUMBER;
        hash = (53 * hash) + getBlock().hashCode();
      }
      if (getStorageIDsCount() > 0) {
        hash = (37 * hash) + STORAGEIDS_FIELD_NUMBER;
        hash = (53 * hash) + getStorageIDsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockWithLocationsProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockWithLocationsProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getBlockFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (blockBuilder_ == null) {
          block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
        } else {
          blockBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        storageIDs_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (blockBuilder_ == null) {
          result.block_ = block_;
        } else {
          result.block_ = blockBuilder_.build();
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          storageIDs_ = new com.google.protobuf.UnmodifiableLazyStringList(
              storageIDs_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.storageIDs_ = storageIDs_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.getDefaultInstance()) return this;
        if (other.hasBlock()) {
          mergeBlock(other.getBlock());
        }
        if (!other.storageIDs_.isEmpty()) {
          if (storageIDs_.isEmpty()) {
            storageIDs_ = other.storageIDs_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureStorageIDsIsMutable();
            storageIDs_.addAll(other.storageIDs_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasBlock()) {
          
          return false;
        }
        if (!getBlock().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.newBuilder();
              if (hasBlock()) {
                subBuilder.mergeFrom(getBlock());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setBlock(subBuilder.buildPartial());
              break;
            }
            case 18: {
              ensureStorageIDsIsMutable();
              storageIDs_.add(input.readBytes());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required .BlockProto block = 1;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> blockBuilder_;
      public boolean hasBlock() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto getBlock() {
        if (blockBuilder_ == null) {
          return block_;
        } else {
          return blockBuilder_.getMessage();
        }
      }
      public Builder setBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
        if (blockBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          block_ = value;
          onChanged();
        } else {
          blockBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder setBlock(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
        if (blockBuilder_ == null) {
          block_ = builderForValue.build();
          onChanged();
        } else {
          blockBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder mergeBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
        if (blockBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              block_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance()) {
            block_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.newBuilder(block_).mergeFrom(value).buildPartial();
          } else {
            block_ = value;
          }
          onChanged();
        } else {
          blockBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearBlock() {
        if (blockBuilder_ == null) {
          block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
          onChanged();
        } else {
          blockBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder getBlockBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getBlockFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder getBlockOrBuilder() {
        if (blockBuilder_ != null) {
          return blockBuilder_.getMessageOrBuilder();
        } else {
          return block_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder> 
          getBlockFieldBuilder() {
        if (blockBuilder_ == null) {
          blockBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProtoOrBuilder>(
                  block_,
                  getParentForChildren(),
                  isClean());
          block_ = null;
        }
        return blockBuilder_;
      }
      
      // repeated string storageIDs = 2;
      private com.google.protobuf.LazyStringList storageIDs_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureStorageIDsIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          storageIDs_ = new com.google.protobuf.LazyStringArrayList(storageIDs_);
          bitField0_ |= 0x00000002;
         }
      }
      public java.util.List<String>
          getStorageIDsList() {
        return java.util.Collections.unmodifiableList(storageIDs_);
      }
      public int getStorageIDsCount() {
        return storageIDs_.size();
      }
      public String getStorageIDs(int index) {
        return storageIDs_.get(index);
      }
      public Builder setStorageIDs(
          int index, String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureStorageIDsIsMutable();
        storageIDs_.set(index, value);
        onChanged();
        return this;
      }
      public Builder addStorageIDs(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureStorageIDsIsMutable();
        storageIDs_.add(value);
        onChanged();
        return this;
      }
      public Builder addAllStorageIDs(
          java.lang.Iterable<String> values) {
        ensureStorageIDsIsMutable();
        super.addAll(values, storageIDs_);
        onChanged();
        return this;
      }
      public Builder clearStorageIDs() {
        storageIDs_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      void addStorageIDs(com.google.protobuf.ByteString value) {
        ensureStorageIDsIsMutable();
        storageIDs_.add(value);
        onChanged();
      }
      
      // @@protoc_insertion_point(builder_scope:BlockWithLocationsProto)
    }
    
    static {
      defaultInstance = new BlockWithLocationsProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:BlockWithLocationsProto)
  }
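  /*
   * Illustrative usage (editor's sketch, not part of the protoc output): the
   * required nested BlockProto must itself be fully initialized, and storage
   * IDs are appended individually or in bulk. The identifiers below are
   * hypothetical.
   *
   *   BlockWithLocationsProto bwl = BlockWithLocationsProto.newBuilder()
   *       .setBlock(BlockProto.newBuilder()
   *           .setBlockId(1073741825L)
   *           .setGenStamp(1001L)
   *           .build())
   *       .addStorageIDs("DS-1")
   *       .addStorageIDs("DS-2")
   *       .build();
   *   java.util.List<String> ids = bwl.getStorageIDsList();
   */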
  
  public interface BlocksWithLocationsProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated .BlockWithLocationsProto blocks = 1;
    java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto>
        getBlocksList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto getBlocks(int index);
    int getBlocksCount();
    java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder>
        getBlocksOrBuilderList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder getBlocksOrBuilder(
        int index);
  }
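  /*
   * Illustrative read path (editor's sketch, not part of the protoc output):
   * BlocksWithLocationsProto wraps a repeated BlockWithLocationsProto field,
   * so consumers typically iterate getBlocksList() after parsing. "in" is an
   * assumed java.io.InputStream carrying a serialized message.
   *
   *   BlocksWithLocationsProto blocks = BlocksWithLocationsProto.parseFrom(in);
   *   for (BlockWithLocationsProto b : blocks.getBlocksList()) {
   *     long id = b.getBlock().getBlockId();
   *     int replicas = b.getStorageIDsCount();
   *   }
   */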
  public static final class BlocksWithLocationsProto extends
      com.google.protobuf.GeneratedMessage
      implements BlocksWithLocationsProtoOrBuilder {
    // Use BlocksWithLocationsProto.newBuilder() to construct.
    private BlocksWithLocationsProto(Builder builder) {
      super(builder);
    }
    private BlocksWithLocationsProto(boolean noInit) {}
    
    private static final BlocksWithLocationsProto defaultInstance;
    public static BlocksWithLocationsProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public BlocksWithLocationsProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlocksWithLocationsProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlocksWithLocationsProto_fieldAccessorTable;
    }
    
    // repeated .BlockWithLocationsProto blocks = 1;
    public static final int BLOCKS_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto> blocks_;
    public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto> getBlocksList() {
      return blocks_;
    }
    public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder>
        getBlocksOrBuilderList() {
      return blocks_;
    }
    public int getBlocksCount() {
      return blocks_.size();
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto getBlocks(int index) {
      return blocks_.get(index);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder getBlocksOrBuilder(
        int index) {
      return blocks_.get(index);
    }
    
    private void initFields() {
      blocks_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      for (int i = 0; i < getBlocksCount(); i++) {
        if (!getBlocks(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < blocks_.size(); i++) {
        output.writeMessage(1, blocks_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      for (int i = 0; i < blocks_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, blocks_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto) obj;
      
      boolean result = true;
      result = result && getBlocksList()
          .equals(other.getBlocksList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getBlocksCount() > 0) {
        hash = (37 * hash) + BLOCKS_FIELD_NUMBER;
        hash = (53 * hash) + getBlocksList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlocksWithLocationsProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlocksWithLocationsProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getBlocksFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (blocksBuilder_ == null) {
          blocks_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          blocksBuilder_.clear();
        }
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto(this);
        int from_bitField0_ = bitField0_;
        if (blocksBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            blocks_ = java.util.Collections.unmodifiableList(blocks_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.blocks_ = blocks_;
        } else {
          result.blocks_ = blocksBuilder_.build();
        }
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto.getDefaultInstance()) return this;
        if (blocksBuilder_ == null) {
          if (!other.blocks_.isEmpty()) {
            if (blocks_.isEmpty()) {
              blocks_ = other.blocks_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureBlocksIsMutable();
              blocks_.addAll(other.blocks_);
            }
            onChanged();
          }
        } else {
          if (!other.blocks_.isEmpty()) {
            if (blocksBuilder_.isEmpty()) {
              blocksBuilder_.dispose();
              blocksBuilder_ = null;
              blocks_ = other.blocks_;
              bitField0_ = (bitField0_ & ~0x00000001);
              blocksBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getBlocksFieldBuilder() : null;
            } else {
              blocksBuilder_.addAllMessages(other.blocks_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        for (int i = 0; i < getBlocksCount(); i++) {
          if (!getBlocks(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
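            // tag 10 = (field number 1 << 3) | wire type 2 (length-delimited):
            // one serialized BlockWithLocationsProto element of the repeated field.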
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addBlocks(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // repeated .BlockWithLocationsProto blocks = 1;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto> blocks_ =
        java.util.Collections.emptyList();
      private void ensureBlocksIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          blocks_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto>(blocks_);
          bitField0_ |= 0x00000001;
        }
      }
      
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder> blocksBuilder_;
      
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto> getBlocksList() {
        if (blocksBuilder_ == null) {
          return java.util.Collections.unmodifiableList(blocks_);
        } else {
          return blocksBuilder_.getMessageList();
        }
      }
      public int getBlocksCount() {
        if (blocksBuilder_ == null) {
          return blocks_.size();
        } else {
          return blocksBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto getBlocks(int index) {
        if (blocksBuilder_ == null) {
          return blocks_.get(index);
        } else {
          return blocksBuilder_.getMessage(index);
        }
      }
      public Builder setBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto value) {
        if (blocksBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBlocksIsMutable();
          blocks_.set(index, value);
          onChanged();
        } else {
          blocksBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder builderForValue) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.set(index, builderForValue.build());
          onChanged();
        } else {
          blocksBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addBlocks(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto value) {
        if (blocksBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBlocksIsMutable();
          blocks_.add(value);
          onChanged();
        } else {
          blocksBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto value) {
        if (blocksBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBlocksIsMutable();
          blocks_.add(index, value);
          onChanged();
        } else {
          blocksBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addBlocks(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder builderForValue) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.add(builderForValue.build());
          onChanged();
        } else {
          blocksBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addBlocks(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder builderForValue) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.add(index, builderForValue.build());
          onChanged();
        } else {
          blocksBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllBlocks(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto> values) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          super.addAll(values, blocks_);
          onChanged();
        } else {
          blocksBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearBlocks() {
        if (blocksBuilder_ == null) {
          blocks_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          blocksBuilder_.clear();
        }
        return this;
      }
      public Builder removeBlocks(int index) {
        if (blocksBuilder_ == null) {
          ensureBlocksIsMutable();
          blocks_.remove(index);
          onChanged();
        } else {
          blocksBuilder_.remove(index);
        }
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder getBlocksBuilder(
          int index) {
        return getBlocksFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder getBlocksOrBuilder(
          int index) {
        if (blocksBuilder_ == null) {
          return blocks_.get(index);
        } else {
          return blocksBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder>
           getBlocksOrBuilderList() {
        if (blocksBuilder_ != null) {
          return blocksBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(blocks_);
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder addBlocksBuilder() {
        return getBlocksFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.getDefaultInstance());
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder addBlocksBuilder(
          int index) {
        return getBlocksFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder>
           getBlocksBuilderList() {
        return getBlocksFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder> 
          getBlocksFieldBuilder() {
        if (blocksBuilder_ == null) {
          blocksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProtoOrBuilder>(
                  blocks_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          blocks_ = null;
        }
        return blocksBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:BlocksWithLocationsProto)
    }
    
    static {
      defaultInstance = new BlocksWithLocationsProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:BlocksWithLocationsProto)
  }
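
  // Usage sketch (not part of the generated code): BlocksWithLocationsProto is
  // assembled through its Builder. This is a minimal, hedged example; it assumes
  // `blockElem` is an already-initialized BlockWithLocationsProto, since that
  // message has required fields of its own.
  //
  //   HdfsProtos.BlocksWithLocationsProto msg =
  //       HdfsProtos.BlocksWithLocationsProto.newBuilder()
  //           .addBlocks(blockElem)   // repeated field 1
  //           .build();               // throws if any element is uninitialized
  //   int n = msg.getBlocksCount();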
  
  public interface RemoteEditLogProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint64 startTxId = 1;
    boolean hasStartTxId();
    long getStartTxId();
    
    // required uint64 endTxId = 2;
    boolean hasEndTxId();
    long getEndTxId();
    
    // optional bool isInProgress = 3 [default = false];
    boolean hasIsInProgress();
    boolean getIsInProgress();
  }
  public static final class RemoteEditLogProto extends
      com.google.protobuf.GeneratedMessage
      implements RemoteEditLogProtoOrBuilder {
    // Use RemoteEditLogProto.newBuilder() to construct.
    private RemoteEditLogProto(Builder builder) {
      super(builder);
    }
    private RemoteEditLogProto(boolean noInit) {}
    
    private static final RemoteEditLogProto defaultInstance;
    public static RemoteEditLogProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public RemoteEditLogProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint64 startTxId = 1;
    public static final int STARTTXID_FIELD_NUMBER = 1;
    private long startTxId_;
    public boolean hasStartTxId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getStartTxId() {
      return startTxId_;
    }
    
    // required uint64 endTxId = 2;
    public static final int ENDTXID_FIELD_NUMBER = 2;
    private long endTxId_;
    public boolean hasEndTxId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getEndTxId() {
      return endTxId_;
    }
    
    // optional bool isInProgress = 3 [default = false];
    public static final int ISINPROGRESS_FIELD_NUMBER = 3;
    private boolean isInProgress_;
    public boolean hasIsInProgress() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public boolean getIsInProgress() {
      return isInProgress_;
    }
    
    private void initFields() {
      startTxId_ = 0L;
      endTxId_ = 0L;
      isInProgress_ = false;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasStartTxId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasEndTxId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, startTxId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, endTxId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, isInProgress_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, startTxId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, endTxId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, isInProgress_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto) obj;
      
      boolean result = true;
      result = result && (hasStartTxId() == other.hasStartTxId());
      if (hasStartTxId()) {
        result = result && (getStartTxId()
            == other.getStartTxId());
      }
      result = result && (hasEndTxId() == other.hasEndTxId());
      if (hasEndTxId()) {
        result = result && (getEndTxId()
            == other.getEndTxId());
      }
      result = result && (hasIsInProgress() == other.hasIsInProgress());
      if (hasIsInProgress()) {
        result = result && (getIsInProgress()
            == other.getIsInProgress());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasStartTxId()) {
        hash = (37 * hash) + STARTTXID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getStartTxId());
      }
      if (hasEndTxId()) {
        hash = (37 * hash) + ENDTXID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getEndTxId());
      }
      if (hasIsInProgress()) {
        hash = (37 * hash) + ISINPROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIsInProgress());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        startTxId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        endTxId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        isInProgress_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.startTxId_ = startTxId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.endTxId_ = endTxId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.isInProgress_ = isInProgress_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.getDefaultInstance()) return this;
        if (other.hasStartTxId()) {
          setStartTxId(other.getStartTxId());
        }
        if (other.hasEndTxId()) {
          setEndTxId(other.getEndTxId());
        }
        if (other.hasIsInProgress()) {
          setIsInProgress(other.getIsInProgress());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasStartTxId()) {
          
          return false;
        }
        if (!hasEndTxId()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
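            // Tags 8, 16 and 24 are fields 1-3 with wire type 0 (varint):
            // startTxId, endTxId and isInProgress respectively.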
            case 8: {
              bitField0_ |= 0x00000001;
              startTxId_ = input.readUInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              endTxId_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              isInProgress_ = input.readBool();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint64 startTxId = 1;
      private long startTxId_ ;
      public boolean hasStartTxId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getStartTxId() {
        return startTxId_;
      }
      public Builder setStartTxId(long value) {
        bitField0_ |= 0x00000001;
        startTxId_ = value;
        onChanged();
        return this;
      }
      public Builder clearStartTxId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        startTxId_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 endTxId = 2;
      private long endTxId_ ;
      public boolean hasEndTxId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getEndTxId() {
        return endTxId_;
      }
      public Builder setEndTxId(long value) {
        bitField0_ |= 0x00000002;
        endTxId_ = value;
        onChanged();
        return this;
      }
      public Builder clearEndTxId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        endTxId_ = 0L;
        onChanged();
        return this;
      }
      
      // optional bool isInProgress = 3 [default = false];
      private boolean isInProgress_ ;
      public boolean hasIsInProgress() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public boolean getIsInProgress() {
        return isInProgress_;
      }
      public Builder setIsInProgress(boolean value) {
        bitField0_ |= 0x00000004;
        isInProgress_ = value;
        onChanged();
        return this;
      }
      public Builder clearIsInProgress() {
        bitField0_ = (bitField0_ & ~0x00000004);
        isInProgress_ = false;
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:RemoteEditLogProto)
    }
    
    static {
      defaultInstance = new RemoteEditLogProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:RemoteEditLogProto)
  }
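
  // Usage sketch (not part of the generated code): RemoteEditLogProto has two
  // required uint64 fields (startTxId, endTxId) and an optional bool
  // (isInProgress). A minimal, hedged round-trip looks like this; build() throws
  // if a required field is unset.
  //
  //   HdfsProtos.RemoteEditLogProto log = HdfsProtos.RemoteEditLogProto.newBuilder()
  //       .setStartTxId(1L)
  //       .setEndTxId(100L)
  //       .setIsInProgress(false)
  //       .build();
  //   byte[] wire = log.toByteArray();
  //   HdfsProtos.RemoteEditLogProto parsed = HdfsProtos.RemoteEditLogProto.parseFrom(wire);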
  
  public interface RemoteEditLogManifestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated .RemoteEditLogProto logs = 1;
    java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto>
        getLogsList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto getLogs(int index);
    int getLogsCount();
    java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder>
        getLogsOrBuilderList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder getLogsOrBuilder(
        int index);
  }
  public static final class RemoteEditLogManifestProto extends
      com.google.protobuf.GeneratedMessage
      implements RemoteEditLogManifestProtoOrBuilder {
    // Use RemoteEditLogManifestProto.newBuilder() to construct.
    private RemoteEditLogManifestProto(Builder builder) {
      super(builder);
    }
    private RemoteEditLogManifestProto(boolean noInit) {}
    
    private static final RemoteEditLogManifestProto defaultInstance;
    public static RemoteEditLogManifestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public RemoteEditLogManifestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogManifestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogManifestProto_fieldAccessorTable;
    }
    
    // repeated .RemoteEditLogProto logs = 1;
    public static final int LOGS_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto> logs_;
    public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto> getLogsList() {
      return logs_;
    }
    public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder>
        getLogsOrBuilderList() {
      return logs_;
    }
    public int getLogsCount() {
      return logs_.size();
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto getLogs(int index) {
      return logs_.get(index);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder getLogsOrBuilder(
        int index) {
      return logs_.get(index);
    }
    
    private void initFields() {
      logs_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      for (int i = 0; i < getLogsCount(); i++) {
        if (!getLogs(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < logs_.size(); i++) {
        output.writeMessage(1, logs_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      for (int i = 0; i < logs_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, logs_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto) obj;
      
      boolean result = true;
      result = result && getLogsList()
          .equals(other.getLogsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getLogsCount() > 0) {
        hash = (37 * hash) + LOGS_FIELD_NUMBER;
        hash = (53 * hash) + getLogsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogManifestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RemoteEditLogManifestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getLogsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (logsBuilder_ == null) {
          logs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          logsBuilder_.clear();
        }
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto(this);
        int from_bitField0_ = bitField0_;
        if (logsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            logs_ = java.util.Collections.unmodifiableList(logs_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.logs_ = logs_;
        } else {
          result.logs_ = logsBuilder_.build();
        }
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto.getDefaultInstance()) return this;
        if (logsBuilder_ == null) {
          if (!other.logs_.isEmpty()) {
            if (logs_.isEmpty()) {
              logs_ = other.logs_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureLogsIsMutable();
              logs_.addAll(other.logs_);
            }
            onChanged();
          }
        } else {
          if (!other.logs_.isEmpty()) {
            if (logsBuilder_.isEmpty()) {
              logsBuilder_.dispose();
              logsBuilder_ = null;
              logs_ = other.logs_;
              bitField0_ = (bitField0_ & ~0x00000001);
              logsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getLogsFieldBuilder() : null;
            } else {
              logsBuilder_.addAllMessages(other.logs_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        for (int i = 0; i < getLogsCount(); i++) {
          if (!getLogs(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
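            // tag 10 = (field number 1 << 3) | wire type 2 (length-delimited):
            // one serialized RemoteEditLogProto element of the repeated `logs` field.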
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addLogs(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // repeated .RemoteEditLogProto logs = 1;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto> logs_ =
        java.util.Collections.emptyList();
      private void ensureLogsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          logs_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto>(logs_);
          bitField0_ |= 0x00000001;
        }
      }
      
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder> logsBuilder_;
      
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto> getLogsList() {
        if (logsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(logs_);
        } else {
          return logsBuilder_.getMessageList();
        }
      }
      public int getLogsCount() {
        if (logsBuilder_ == null) {
          return logs_.size();
        } else {
          return logsBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto getLogs(int index) {
        if (logsBuilder_ == null) {
          return logs_.get(index);
        } else {
          return logsBuilder_.getMessage(index);
        }
      }
      public Builder setLogs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto value) {
        if (logsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLogsIsMutable();
          logs_.set(index, value);
          onChanged();
        } else {
          logsBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setLogs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder builderForValue) {
        if (logsBuilder_ == null) {
          ensureLogsIsMutable();
          logs_.set(index, builderForValue.build());
          onChanged();
        } else {
          logsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addLogs(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto value) {
        if (logsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLogsIsMutable();
          logs_.add(value);
          onChanged();
        } else {
          logsBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addLogs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto value) {
        if (logsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLogsIsMutable();
          logs_.add(index, value);
          onChanged();
        } else {
          logsBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addLogs(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder builderForValue) {
        if (logsBuilder_ == null) {
          ensureLogsIsMutable();
          logs_.add(builderForValue.build());
          onChanged();
        } else {
          logsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addLogs(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder builderForValue) {
        if (logsBuilder_ == null) {
          ensureLogsIsMutable();
          logs_.add(index, builderForValue.build());
          onChanged();
        } else {
          logsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllLogs(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto> values) {
        if (logsBuilder_ == null) {
          ensureLogsIsMutable();
          super.addAll(values, logs_);
          onChanged();
        } else {
          logsBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearLogs() {
        if (logsBuilder_ == null) {
          logs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          logsBuilder_.clear();
        }
        return this;
      }
      public Builder removeLogs(int index) {
        if (logsBuilder_ == null) {
          ensureLogsIsMutable();
          logs_.remove(index);
          onChanged();
        } else {
          logsBuilder_.remove(index);
        }
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder getLogsBuilder(
          int index) {
        return getLogsFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder getLogsOrBuilder(
          int index) {
        if (logsBuilder_ == null) {
          return logs_.get(index);
        } else {
          return logsBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder>
           getLogsOrBuilderList() {
        if (logsBuilder_ != null) {
          return logsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(logs_);
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder addLogsBuilder() {
        return getLogsFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.getDefaultInstance());
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder addLogsBuilder(
          int index) {
        return getLogsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder>
           getLogsBuilderList() {
        return getLogsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder> 
          getLogsFieldBuilder() {
        if (logsBuilder_ == null) {
          logsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProtoOrBuilder>(
                  logs_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          logs_ = null;
        }
        return logsBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:RemoteEditLogManifestProto)
    }
    
    static {
      defaultInstance = new RemoteEditLogManifestProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:RemoteEditLogManifestProto)
  }
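
  // Usage sketch (not part of the generated code): the manifest is just the
  // repeated `logs` field. Hedged example; assumes `log` is a fully built
  // RemoteEditLogProto (for instance, the one sketched after RemoteEditLogProto above).
  //
  //   HdfsProtos.RemoteEditLogManifestProto manifest =
  //       HdfsProtos.RemoteEditLogManifestProto.newBuilder()
  //           .addLogs(log)
  //           .build();
  //   for (HdfsProtos.RemoteEditLogProto l : manifest.getLogsList()) {
  //     long span = l.getEndTxId() - l.getStartTxId();
  //   }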
  
  public interface NamespaceInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string buildVersion = 1;
    boolean hasBuildVersion();
    String getBuildVersion();
    
    // required uint32 distUpgradeVersion = 2;
    boolean hasDistUpgradeVersion();
    int getDistUpgradeVersion();
    
    // required string blockPoolID = 3;
    boolean hasBlockPoolID();
    String getBlockPoolID();
    
    // required .StorageInfoProto storageInfo = 4;
    boolean hasStorageInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder();
    
    // required string softwareVersion = 5;
    boolean hasSoftwareVersion();
    String getSoftwareVersion();
  }
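
  // Usage sketch (not part of the generated code): all five NamespaceInfoProto
  // fields are required, including the nested StorageInfoProto at field 4. The
  // setter names below follow the standard protoc Builder pattern and are
  // assumptions; `storage` stands for an already-built StorageInfoProto.
  //
  //   HdfsProtos.NamespaceInfoProto info = HdfsProtos.NamespaceInfoProto.newBuilder()
  //       .setBuildVersion("...")
  //       .setDistUpgradeVersion(0)
  //       .setBlockPoolID("BP-1")
  //       .setStorageInfo(storage)
  //       .setSoftwareVersion("2.0.0")
  //       .build();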
  public static final class NamespaceInfoProto extends
      com.google.protobuf.GeneratedMessage
      implements NamespaceInfoProtoOrBuilder {
    // Use NamespaceInfoProto.newBuilder() to construct.
    private NamespaceInfoProto(Builder builder) {
      super(builder);
    }
    private NamespaceInfoProto(boolean noInit) {}
    
    private static final NamespaceInfoProto defaultInstance;
    public static NamespaceInfoProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public NamespaceInfoProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamespaceInfoProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamespaceInfoProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required string buildVersion = 1;
    public static final int BUILDVERSION_FIELD_NUMBER = 1;
    private java.lang.Object buildVersion_;
    public boolean hasBuildVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getBuildVersion() {
      java.lang.Object ref = buildVersion_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          buildVersion_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getBuildVersionBytes() {
      java.lang.Object ref = buildVersion_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        buildVersion_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required uint32 distUpgradeVersion = 2;
    public static final int DISTUPGRADEVERSION_FIELD_NUMBER = 2;
    private int distUpgradeVersion_;
    public boolean hasDistUpgradeVersion() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public int getDistUpgradeVersion() {
      return distUpgradeVersion_;
    }
    
    // required string blockPoolID = 3;
    public static final int BLOCKPOOLID_FIELD_NUMBER = 3;
    private java.lang.Object blockPoolID_;
    public boolean hasBlockPoolID() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public String getBlockPoolID() {
      java.lang.Object ref = blockPoolID_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          blockPoolID_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getBlockPoolIDBytes() {
      java.lang.Object ref = blockPoolID_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        blockPoolID_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required .StorageInfoProto storageInfo = 4;
    public static final int STORAGEINFO_FIELD_NUMBER = 4;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto storageInfo_;
    public boolean hasStorageInfo() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo() {
      return storageInfo_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder() {
      return storageInfo_;
    }
    
    // required string softwareVersion = 5;
    public static final int SOFTWAREVERSION_FIELD_NUMBER = 5;
    private java.lang.Object softwareVersion_;
    public boolean hasSoftwareVersion() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public String getSoftwareVersion() {
      java.lang.Object ref = softwareVersion_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          softwareVersion_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getSoftwareVersionBytes() {
      java.lang.Object ref = softwareVersion_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        softwareVersion_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    private void initFields() {
      buildVersion_ = "";
      distUpgradeVersion_ = 0;
      blockPoolID_ = "";
      storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
      softwareVersion_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasBuildVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasDistUpgradeVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBlockPoolID()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStorageInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasSoftwareVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getStorageInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getBuildVersionBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, distUpgradeVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getBlockPoolIDBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, storageInfo_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getSoftwareVersionBytes());
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getBuildVersionBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, distUpgradeVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getBlockPoolIDBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, storageInfo_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getSoftwareVersionBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto) obj;
      
      boolean result = true;
      result = result && (hasBuildVersion() == other.hasBuildVersion());
      if (hasBuildVersion()) {
        result = result && getBuildVersion()
            .equals(other.getBuildVersion());
      }
      result = result && (hasDistUpgradeVersion() == other.hasDistUpgradeVersion());
      if (hasDistUpgradeVersion()) {
        result = result && (getDistUpgradeVersion()
            == other.getDistUpgradeVersion());
      }
      result = result && (hasBlockPoolID() == other.hasBlockPoolID());
      if (hasBlockPoolID()) {
        result = result && getBlockPoolID()
            .equals(other.getBlockPoolID());
      }
      result = result && (hasStorageInfo() == other.hasStorageInfo());
      if (hasStorageInfo()) {
        result = result && getStorageInfo()
            .equals(other.getStorageInfo());
      }
      result = result && (hasSoftwareVersion() == other.hasSoftwareVersion());
      if (hasSoftwareVersion()) {
        result = result && getSoftwareVersion()
            .equals(other.getSoftwareVersion());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasBuildVersion()) {
        hash = (37 * hash) + BUILDVERSION_FIELD_NUMBER;
        hash = (53 * hash) + getBuildVersion().hashCode();
      }
      if (hasDistUpgradeVersion()) {
        hash = (37 * hash) + DISTUPGRADEVERSION_FIELD_NUMBER;
        hash = (53 * hash) + getDistUpgradeVersion();
      }
      if (hasBlockPoolID()) {
        hash = (37 * hash) + BLOCKPOOLID_FIELD_NUMBER;
        hash = (53 * hash) + getBlockPoolID().hashCode();
      }
      if (hasStorageInfo()) {
        hash = (37 * hash) + STORAGEINFO_FIELD_NUMBER;
        hash = (53 * hash) + getStorageInfo().hashCode();
      }
      if (hasSoftwareVersion()) {
        hash = (37 * hash) + SOFTWAREVERSION_FIELD_NUMBER;
        hash = (53 * hash) + getSoftwareVersion().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamespaceInfoProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_NamespaceInfoProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getStorageInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        buildVersion_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        distUpgradeVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        blockPoolID_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        if (storageInfoBuilder_ == null) {
          storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
        } else {
          storageInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        softwareVersion_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.buildVersion_ = buildVersion_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.distUpgradeVersion_ = distUpgradeVersion_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.blockPoolID_ = blockPoolID_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (storageInfoBuilder_ == null) {
          result.storageInfo_ = storageInfo_;
        } else {
          result.storageInfo_ = storageInfoBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.softwareVersion_ = softwareVersion_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDefaultInstance()) return this;
        if (other.hasBuildVersion()) {
          setBuildVersion(other.getBuildVersion());
        }
        if (other.hasDistUpgradeVersion()) {
          setDistUpgradeVersion(other.getDistUpgradeVersion());
        }
        if (other.hasBlockPoolID()) {
          setBlockPoolID(other.getBlockPoolID());
        }
        if (other.hasStorageInfo()) {
          mergeStorageInfo(other.getStorageInfo());
        }
        if (other.hasSoftwareVersion()) {
          setSoftwareVersion(other.getSoftwareVersion());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasBuildVersion()) {
          
          return false;
        }
        if (!hasDistUpgradeVersion()) {
          
          return false;
        }
        if (!hasBlockPoolID()) {
          
          return false;
        }
        if (!hasStorageInfo()) {
          
          return false;
        }
        if (!hasSoftwareVersion()) {
          
          return false;
        }
        if (!getStorageInfo().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
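          // A protobuf wire tag is (field number << 3) | wire type, so the cases below
          // match field 1 (tag 10) and fields 3, 4, 5 (tags 26, 34, 42) as
          // length-delimited values, and field 2 (tag 16) as a varint.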
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              buildVersion_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              distUpgradeVersion_ = input.readUInt32();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              blockPoolID_ = input.readBytes();
              break;
            }
            case 34: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.newBuilder();
              if (hasStorageInfo()) {
                subBuilder.mergeFrom(getStorageInfo());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setStorageInfo(subBuilder.buildPartial());
              break;
            }
            case 42: {
              bitField0_ |= 0x00000010;
              softwareVersion_ = input.readBytes();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required string buildVersion = 1;
      private java.lang.Object buildVersion_ = "";
      public boolean hasBuildVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getBuildVersion() {
        java.lang.Object ref = buildVersion_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          buildVersion_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setBuildVersion(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        buildVersion_ = value;
        onChanged();
        return this;
      }
      public Builder clearBuildVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        buildVersion_ = getDefaultInstance().getBuildVersion();
        onChanged();
        return this;
      }
      void setBuildVersion(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        buildVersion_ = value;
        onChanged();
      }
      
      // required uint32 distUpgradeVersion = 2;
      private int distUpgradeVersion_ ;
      public boolean hasDistUpgradeVersion() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public int getDistUpgradeVersion() {
        return distUpgradeVersion_;
      }
      public Builder setDistUpgradeVersion(int value) {
        bitField0_ |= 0x00000002;
        distUpgradeVersion_ = value;
        onChanged();
        return this;
      }
      public Builder clearDistUpgradeVersion() {
        bitField0_ = (bitField0_ & ~0x00000002);
        distUpgradeVersion_ = 0;
        onChanged();
        return this;
      }
      
      // required string blockPoolID = 3;
      private java.lang.Object blockPoolID_ = "";
      public boolean hasBlockPoolID() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public String getBlockPoolID() {
        java.lang.Object ref = blockPoolID_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          blockPoolID_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setBlockPoolID(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        blockPoolID_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockPoolID() {
        bitField0_ = (bitField0_ & ~0x00000004);
        blockPoolID_ = getDefaultInstance().getBlockPoolID();
        onChanged();
        return this;
      }
      void setBlockPoolID(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000004;
        blockPoolID_ = value;
        onChanged();
      }
      
      // required .StorageInfoProto storageInfo = 4;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder> storageInfoBuilder_;
      public boolean hasStorageInfo() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto getStorageInfo() {
        if (storageInfoBuilder_ == null) {
          return storageInfo_;
        } else {
          return storageInfoBuilder_.getMessage();
        }
      }
      public Builder setStorageInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto value) {
        if (storageInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          storageInfo_ = value;
          onChanged();
        } else {
          storageInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder setStorageInfo(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder builderForValue) {
        if (storageInfoBuilder_ == null) {
          storageInfo_ = builderForValue.build();
          onChanged();
        } else {
          storageInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder mergeStorageInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto value) {
        if (storageInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              storageInfo_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance()) {
            storageInfo_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.newBuilder(storageInfo_).mergeFrom(value).buildPartial();
          } else {
            storageInfo_ = value;
          }
          onChanged();
        } else {
          storageInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder clearStorageInfo() {
        if (storageInfoBuilder_ == null) {
          storageInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.getDefaultInstance();
          onChanged();
        } else {
          storageInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder getStorageInfoBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getStorageInfoFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder getStorageInfoOrBuilder() {
        if (storageInfoBuilder_ != null) {
          return storageInfoBuilder_.getMessageOrBuilder();
        } else {
          return storageInfo_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder> 
          getStorageInfoFieldBuilder() {
        if (storageInfoBuilder_ == null) {
          storageInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProtoOrBuilder>(
                  storageInfo_,
                  getParentForChildren(),
                  isClean());
          storageInfo_ = null;
        }
        return storageInfoBuilder_;
      }
      
      // required string softwareVersion = 5;
      private java.lang.Object softwareVersion_ = "";
      public boolean hasSoftwareVersion() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public String getSoftwareVersion() {
        java.lang.Object ref = softwareVersion_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          softwareVersion_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setSoftwareVersion(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        softwareVersion_ = value;
        onChanged();
        return this;
      }
      public Builder clearSoftwareVersion() {
        bitField0_ = (bitField0_ & ~0x00000010);
        softwareVersion_ = getDefaultInstance().getSoftwareVersion();
        onChanged();
        return this;
      }
      void setSoftwareVersion(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000010;
        softwareVersion_ = value;
        onChanged();
      }
      
      // @@protoc_insertion_point(builder_scope:NamespaceInfoProto)
    }
    
    static {
      defaultInstance = new NamespaceInfoProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:NamespaceInfoProto)
  }
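  
  // --- Illustrative usage sketch (not generated by protoc) ---
  // A minimal example of building and round-tripping a NamespaceInfoProto through
  // the Builder and parseFrom() methods above. The StorageInfoProto setters used
  // here (setLayoutVersion, setNamespaceID, setClusterID, setCTime) are assumed
  // from the StorageInfoProto message declared earlier in hdfs.proto; treat them
  // as illustrative, not authoritative.
  private static NamespaceInfoProto buildSampleNamespaceInfo()
      throws com.google.protobuf.InvalidProtocolBufferException {
    // Every StorageInfoProto field is required, so set each one before build().
    StorageInfoProto storage = StorageInfoProto.newBuilder()
        .setLayoutVersion(40)        // assumed field of StorageInfoProto
        .setNamespaceID(12345)       // assumed field of StorageInfoProto
        .setClusterID("CID-sample")  // assumed field of StorageInfoProto
        .setCTime(0L)                // assumed field of StorageInfoProto
        .build();
    NamespaceInfoProto info = NamespaceInfoProto.newBuilder()
        .setBuildVersion("sample-build")
        .setDistUpgradeVersion(0)
        .setBlockPoolID("BP-sample")
        .setStorageInfo(storage)
        .setSoftwareVersion("2.0.0")
        .build();                    // throws if any required field is missing
    // Serialize to a ByteString and parse it back; parseFrom() re-validates the
    // required fields and throws InvalidProtocolBufferException if any are absent.
    return NamespaceInfoProto.parseFrom(info.toByteString());
  }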
  
  public interface BlockKeyProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint32 keyId = 1;
    boolean hasKeyId();
    int getKeyId();
    
    // required uint64 expiryDate = 2;
    boolean hasExpiryDate();
    long getExpiryDate();
    
    // optional bytes keyBytes = 3;
    boolean hasKeyBytes();
    com.google.protobuf.ByteString getKeyBytes();
  }
  public static final class BlockKeyProto extends
      com.google.protobuf.GeneratedMessage
      implements BlockKeyProtoOrBuilder {
    // Use BlockKeyProto.newBuilder() to construct.
    private BlockKeyProto(Builder builder) {
      super(builder);
    }
    private BlockKeyProto(boolean noInit) {}
    
    private static final BlockKeyProto defaultInstance;
    public static BlockKeyProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public BlockKeyProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockKeyProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockKeyProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint32 keyId = 1;
    public static final int KEYID_FIELD_NUMBER = 1;
    private int keyId_;
    public boolean hasKeyId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getKeyId() {
      return keyId_;
    }
    
    // required uint64 expiryDate = 2;
    public static final int EXPIRYDATE_FIELD_NUMBER = 2;
    private long expiryDate_;
    public boolean hasExpiryDate() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getExpiryDate() {
      return expiryDate_;
    }
    
    // optional bytes keyBytes = 3;
    public static final int KEYBYTES_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString keyBytes_;
    public boolean hasKeyBytes() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public com.google.protobuf.ByteString getKeyBytes() {
      return keyBytes_;
    }
    
    private void initFields() {
      keyId_ = 0;
      expiryDate_ = 0L;
      keyBytes_ = com.google.protobuf.ByteString.EMPTY;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasKeyId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasExpiryDate()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, keyId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, expiryDate_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, keyBytes_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, keyId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, expiryDate_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, keyBytes_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto) obj;
      
      boolean result = true;
      result = result && (hasKeyId() == other.hasKeyId());
      if (hasKeyId()) {
        result = result && (getKeyId()
            == other.getKeyId());
      }
      result = result && (hasExpiryDate() == other.hasExpiryDate());
      if (hasExpiryDate()) {
        result = result && (getExpiryDate()
            == other.getExpiryDate());
      }
      result = result && (hasKeyBytes() == other.hasKeyBytes());
      if (hasKeyBytes()) {
        result = result && getKeyBytes()
            .equals(other.getKeyBytes());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasKeyId()) {
        hash = (37 * hash) + KEYID_FIELD_NUMBER;
        hash = (53 * hash) + getKeyId();
      }
      if (hasExpiryDate()) {
        hash = (37 * hash) + EXPIRYDATE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getExpiryDate());
      }
      if (hasKeyBytes()) {
        hash = (37 * hash) + KEYBYTES_FIELD_NUMBER;
        hash = (53 * hash) + getKeyBytes().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockKeyProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockKeyProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        keyId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        expiryDate_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        keyBytes_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.keyId_ = keyId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.expiryDate_ = expiryDate_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.keyBytes_ = keyBytes_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance()) return this;
        if (other.hasKeyId()) {
          setKeyId(other.getKeyId());
        }
        if (other.hasExpiryDate()) {
          setExpiryDate(other.getExpiryDate());
        }
        if (other.hasKeyBytes()) {
          setKeyBytes(other.getKeyBytes());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasKeyId()) {
          
          return false;
        }
        if (!hasExpiryDate()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              keyId_ = input.readUInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              expiryDate_ = input.readUInt64();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              keyBytes_ = input.readBytes();
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint32 keyId = 1;
      private int keyId_ ;
      public boolean hasKeyId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getKeyId() {
        return keyId_;
      }
      public Builder setKeyId(int value) {
        bitField0_ |= 0x00000001;
        keyId_ = value;
        onChanged();
        return this;
      }
      public Builder clearKeyId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        keyId_ = 0;
        onChanged();
        return this;
      }
      
      // required uint64 expiryDate = 2;
      private long expiryDate_ ;
      public boolean hasExpiryDate() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getExpiryDate() {
        return expiryDate_;
      }
      public Builder setExpiryDate(long value) {
        bitField0_ |= 0x00000002;
        expiryDate_ = value;
        onChanged();
        return this;
      }
      public Builder clearExpiryDate() {
        bitField0_ = (bitField0_ & ~0x00000002);
        expiryDate_ = 0L;
        onChanged();
        return this;
      }
      
      // optional bytes keyBytes = 3;
      private com.google.protobuf.ByteString keyBytes_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasKeyBytes() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public com.google.protobuf.ByteString getKeyBytes() {
        return keyBytes_;
      }
      public Builder setKeyBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        keyBytes_ = value;
        onChanged();
        return this;
      }
      public Builder clearKeyBytes() {
        bitField0_ = (bitField0_ & ~0x00000004);
        keyBytes_ = getDefaultInstance().getKeyBytes();
        onChanged();
        return this;
      }
      
      // @@protoc_insertion_point(builder_scope:BlockKeyProto)
    }
    
    static {
      defaultInstance = new BlockKeyProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:BlockKeyProto)
  }
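  
  // --- Illustrative usage sketch (not generated by protoc) ---
  // Shows the BlockKeyProto Builder: keyId and expiryDate are required and must be
  // set before build(); keyBytes is optional. The round trip uses the byte[]
  // overload of parseFrom() declared above together with the standard
  // toByteArray() serialization inherited from GeneratedMessage.
  private static BlockKeyProto copySampleBlockKey()
      throws com.google.protobuf.InvalidProtocolBufferException {
    BlockKeyProto key = BlockKeyProto.newBuilder()
        .setKeyId(7)
        .setExpiryDate(System.currentTimeMillis() + 3600000L)
        .setKeyBytes(com.google.protobuf.ByteString.copyFrom(new byte[] {1, 2, 3}))
        .build();
    byte[] wire = key.toByteArray();       // inherited GeneratedMessage serialization
    return BlockKeyProto.parseFrom(wire);  // re-checks the required fields
  }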
  
  public interface ExportedBlockKeysProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required bool isBlockTokenEnabled = 1;
    boolean hasIsBlockTokenEnabled();
    boolean getIsBlockTokenEnabled();
    
    // required uint64 keyUpdateInterval = 2;
    boolean hasKeyUpdateInterval();
    long getKeyUpdateInterval();
    
    // required uint64 tokenLifeTime = 3;
    boolean hasTokenLifeTime();
    long getTokenLifeTime();
    
    // required .BlockKeyProto currentKey = 4;
    boolean hasCurrentKey();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto getCurrentKey();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder getCurrentKeyOrBuilder();
    
    // repeated .BlockKeyProto allKeys = 5;
    java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto> 
        getAllKeysList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto getAllKeys(int index);
    int getAllKeysCount();
    java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder> 
        getAllKeysOrBuilderList();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder getAllKeysOrBuilder(
        int index);
  }
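  
  // --- Illustrative usage sketch (not generated by protoc) ---
  // Reads an ExportedBlockKeysProto purely through the accessors declared in the
  // OrBuilder interface above: the indexed accessors of the repeated allKeys field
  // plus the required currentKey. Returns the key with the given keyId, falling
  // back to the current key when no entry matches.
  private static BlockKeyProto findBlockKey(
      ExportedBlockKeysProtoOrBuilder keys, int keyId) {
    for (int i = 0; i < keys.getAllKeysCount(); i++) {
      BlockKeyProto candidate = keys.getAllKeys(i);
      if (candidate.hasKeyId() && candidate.getKeyId() == keyId) {
        return candidate;
      }
    }
    return keys.getCurrentKey();  // required field, always set on a built message
  }
  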
  public static final class ExportedBlockKeysProto extends
      com.google.protobuf.GeneratedMessage
      implements ExportedBlockKeysProtoOrBuilder {
    // Use ExportedBlockKeysProto.newBuilder() to construct.
    private ExportedBlockKeysProto(Builder builder) {
      super(builder);
    }
    private ExportedBlockKeysProto(boolean noInit) {}
    
    private static final ExportedBlockKeysProto defaultInstance;
    public static ExportedBlockKeysProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public ExportedBlockKeysProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExportedBlockKeysProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExportedBlockKeysProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required bool isBlockTokenEnabled = 1;
    public static final int ISBLOCKTOKENENABLED_FIELD_NUMBER = 1;
    private boolean isBlockTokenEnabled_;
    public boolean hasIsBlockTokenEnabled() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public boolean getIsBlockTokenEnabled() {
      return isBlockTokenEnabled_;
    }
    
    // required uint64 keyUpdateInterval = 2;
    public static final int KEYUPDATEINTERVAL_FIELD_NUMBER = 2;
    private long keyUpdateInterval_;
    public boolean hasKeyUpdateInterval() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getKeyUpdateInterval() {
      return keyUpdateInterval_;
    }
    
    // required uint64 tokenLifeTime = 3;
    public static final int TOKENLIFETIME_FIELD_NUMBER = 3;
    private long tokenLifeTime_;
    public boolean hasTokenLifeTime() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getTokenLifeTime() {
      return tokenLifeTime_;
    }
    
    // required .BlockKeyProto currentKey = 4;
    public static final int CURRENTKEY_FIELD_NUMBER = 4;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto currentKey_;
    public boolean hasCurrentKey() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto getCurrentKey() {
      return currentKey_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder getCurrentKeyOrBuilder() {
      return currentKey_;
    }
    
    // repeated .BlockKeyProto allKeys = 5;
    public static final int ALLKEYS_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto> allKeys_;
    public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto> getAllKeysList() {
      return allKeys_;
    }
    public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder> 
        getAllKeysOrBuilderList() {
      return allKeys_;
    }
    public int getAllKeysCount() {
      return allKeys_.size();
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto getAllKeys(int index) {
      return allKeys_.get(index);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder getAllKeysOrBuilder(
        int index) {
      return allKeys_.get(index);
    }
    
    private void initFields() {
      isBlockTokenEnabled_ = false;
      keyUpdateInterval_ = 0L;
      tokenLifeTime_ = 0L;
      currentKey_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance();
      allKeys_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasIsBlockTokenEnabled()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasKeyUpdateInterval()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTokenLifeTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasCurrentKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getCurrentKey().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getAllKeysCount(); i++) {
        if (!getAllKeys(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, isBlockTokenEnabled_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, keyUpdateInterval_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, tokenLifeTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, currentKey_);
      }
      for (int i = 0; i < allKeys_.size(); i++) {
        output.writeMessage(5, allKeys_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, isBlockTokenEnabled_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, keyUpdateInterval_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, tokenLifeTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, currentKey_);
      }
      for (int i = 0; i < allKeys_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, allKeys_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto) obj;
      
      boolean result = true;
      result = result && (hasIsBlockTokenEnabled() == other.hasIsBlockTokenEnabled());
      if (hasIsBlockTokenEnabled()) {
        result = result && (getIsBlockTokenEnabled()
            == other.getIsBlockTokenEnabled());
      }
      result = result && (hasKeyUpdateInterval() == other.hasKeyUpdateInterval());
      if (hasKeyUpdateInterval()) {
        result = result && (getKeyUpdateInterval()
            == other.getKeyUpdateInterval());
      }
      result = result && (hasTokenLifeTime() == other.hasTokenLifeTime());
      if (hasTokenLifeTime()) {
        result = result && (getTokenLifeTime()
            == other.getTokenLifeTime());
      }
      result = result && (hasCurrentKey() == other.hasCurrentKey());
      if (hasCurrentKey()) {
        result = result && getCurrentKey()
            .equals(other.getCurrentKey());
      }
      result = result && getAllKeysList()
          .equals(other.getAllKeysList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasIsBlockTokenEnabled()) {
        hash = (37 * hash) + ISBLOCKTOKENENABLED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIsBlockTokenEnabled());
      }
      if (hasKeyUpdateInterval()) {
        hash = (37 * hash) + KEYUPDATEINTERVAL_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getKeyUpdateInterval());
      }
      if (hasTokenLifeTime()) {
        hash = (37 * hash) + TOKENLIFETIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getTokenLifeTime());
      }
      if (hasCurrentKey()) {
        hash = (37 * hash) + CURRENTKEY_FIELD_NUMBER;
        hash = (53 * hash) + getCurrentKey().hashCode();
      }
      if (getAllKeysCount() > 0) {
        hash = (37 * hash) + ALLKEYS_FIELD_NUMBER;
        hash = (53 * hash) + getAllKeysList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExportedBlockKeysProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExportedBlockKeysProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCurrentKeyFieldBuilder();
          getAllKeysFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        isBlockTokenEnabled_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        keyUpdateInterval_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        tokenLifeTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        if (currentKeyBuilder_ == null) {
          currentKey_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance();
        } else {
          currentKeyBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        if (allKeysBuilder_ == null) {
          allKeys_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          allKeysBuilder_.clear();
        }
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.isBlockTokenEnabled_ = isBlockTokenEnabled_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.keyUpdateInterval_ = keyUpdateInterval_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.tokenLifeTime_ = tokenLifeTime_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (currentKeyBuilder_ == null) {
          result.currentKey_ = currentKey_;
        } else {
          result.currentKey_ = currentKeyBuilder_.build();
        }
        if (allKeysBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            allKeys_ = java.util.Collections.unmodifiableList(allKeys_);
            bitField0_ = (bitField0_ & ~0x00000010);
          }
          result.allKeys_ = allKeys_;
        } else {
          result.allKeys_ = allKeysBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.getDefaultInstance()) return this;
        if (other.hasIsBlockTokenEnabled()) {
          setIsBlockTokenEnabled(other.getIsBlockTokenEnabled());
        }
        if (other.hasKeyUpdateInterval()) {
          setKeyUpdateInterval(other.getKeyUpdateInterval());
        }
        if (other.hasTokenLifeTime()) {
          setTokenLifeTime(other.getTokenLifeTime());
        }
        if (other.hasCurrentKey()) {
          mergeCurrentKey(other.getCurrentKey());
        }
        if (allKeysBuilder_ == null) {
          if (!other.allKeys_.isEmpty()) {
            if (allKeys_.isEmpty()) {
              allKeys_ = other.allKeys_;
              bitField0_ = (bitField0_ & ~0x00000010);
            } else {
              ensureAllKeysIsMutable();
              allKeys_.addAll(other.allKeys_);
            }
            onChanged();
          }
        } else {
          if (!other.allKeys_.isEmpty()) {
            if (allKeysBuilder_.isEmpty()) {
              allKeysBuilder_.dispose();
              allKeysBuilder_ = null;
              allKeys_ = other.allKeys_;
              bitField0_ = (bitField0_ & ~0x00000010);
              allKeysBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getAllKeysFieldBuilder() : null;
            } else {
              allKeysBuilder_.addAllMessages(other.allKeys_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasIsBlockTokenEnabled()) {
          
          return false;
        }
        if (!hasKeyUpdateInterval()) {
          
          return false;
        }
        if (!hasTokenLifeTime()) {
          
          return false;
        }
        if (!hasCurrentKey()) {
          
          return false;
        }
        if (!getCurrentKey().isInitialized()) {
          
          return false;
        }
        for (int i = 0; i < getAllKeysCount(); i++) {
          if (!getAllKeys(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
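            // Each case value below is a protobuf wire-format tag:
            // tag = (fieldNumber << 3) | wireType. For example, 8 is field 1
            // as a varint, 16/24 are fields 2/3 as varints, and 34/42 are
            // fields 4/5 as length-delimited messages.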
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              isBlockTokenEnabled_ = input.readBool();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              keyUpdateInterval_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              tokenLifeTime_ = input.readUInt64();
              break;
            }
            case 34: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.newBuilder();
              if (hasCurrentKey()) {
                subBuilder.mergeFrom(getCurrentKey());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setCurrentKey(subBuilder.buildPartial());
              break;
            }
            case 42: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addAllKeys(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required bool isBlockTokenEnabled = 1;
      private boolean isBlockTokenEnabled_ ;
      public boolean hasIsBlockTokenEnabled() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public boolean getIsBlockTokenEnabled() {
        return isBlockTokenEnabled_;
      }
      public Builder setIsBlockTokenEnabled(boolean value) {
        bitField0_ |= 0x00000001;
        isBlockTokenEnabled_ = value;
        onChanged();
        return this;
      }
      public Builder clearIsBlockTokenEnabled() {
        bitField0_ = (bitField0_ & ~0x00000001);
        isBlockTokenEnabled_ = false;
        onChanged();
        return this;
      }
      
      // required uint64 keyUpdateInterval = 2;
      private long keyUpdateInterval_ ;
      public boolean hasKeyUpdateInterval() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getKeyUpdateInterval() {
        return keyUpdateInterval_;
      }
      public Builder setKeyUpdateInterval(long value) {
        bitField0_ |= 0x00000002;
        keyUpdateInterval_ = value;
        onChanged();
        return this;
      }
      public Builder clearKeyUpdateInterval() {
        bitField0_ = (bitField0_ & ~0x00000002);
        keyUpdateInterval_ = 0L;
        onChanged();
        return this;
      }
      
      // required uint64 tokenLifeTime = 3;
      private long tokenLifeTime_ ;
      public boolean hasTokenLifeTime() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getTokenLifeTime() {
        return tokenLifeTime_;
      }
      public Builder setTokenLifeTime(long value) {
        bitField0_ |= 0x00000004;
        tokenLifeTime_ = value;
        onChanged();
        return this;
      }
      public Builder clearTokenLifeTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        tokenLifeTime_ = 0L;
        onChanged();
        return this;
      }
      
      // required .BlockKeyProto currentKey = 4;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto currentKey_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder> currentKeyBuilder_;
      public boolean hasCurrentKey() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto getCurrentKey() {
        if (currentKeyBuilder_ == null) {
          return currentKey_;
        } else {
          return currentKeyBuilder_.getMessage();
        }
      }
      public Builder setCurrentKey(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto value) {
        if (currentKeyBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          currentKey_ = value;
          onChanged();
        } else {
          currentKeyBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder setCurrentKey(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder builderForValue) {
        if (currentKeyBuilder_ == null) {
          currentKey_ = builderForValue.build();
          onChanged();
        } else {
          currentKeyBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder mergeCurrentKey(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto value) {
        if (currentKeyBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              currentKey_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance()) {
            currentKey_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.newBuilder(currentKey_).mergeFrom(value).buildPartial();
          } else {
            currentKey_ = value;
          }
          onChanged();
        } else {
          currentKeyBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      public Builder clearCurrentKey() {
        if (currentKeyBuilder_ == null) {
          currentKey_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance();
          onChanged();
        } else {
          currentKeyBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder getCurrentKeyBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getCurrentKeyFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder getCurrentKeyOrBuilder() {
        if (currentKeyBuilder_ != null) {
          return currentKeyBuilder_.getMessageOrBuilder();
        } else {
          return currentKey_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder> 
          getCurrentKeyFieldBuilder() {
        if (currentKeyBuilder_ == null) {
          currentKeyBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder>(
                  currentKey_,
                  getParentForChildren(),
                  isClean());
          currentKey_ = null;
        }
        return currentKeyBuilder_;
      }
      
      // repeated .BlockKeyProto allKeys = 5;
      private java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto> allKeys_ =
        java.util.Collections.emptyList();
      private void ensureAllKeysIsMutable() {
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          allKeys_ = new java.util.ArrayList<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto>(allKeys_);
          bitField0_ |= 0x00000010;
         }
      }
      
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder> allKeysBuilder_;
      
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto> getAllKeysList() {
        if (allKeysBuilder_ == null) {
          return java.util.Collections.unmodifiableList(allKeys_);
        } else {
          return allKeysBuilder_.getMessageList();
        }
      }
      public int getAllKeysCount() {
        if (allKeysBuilder_ == null) {
          return allKeys_.size();
        } else {
          return allKeysBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto getAllKeys(int index) {
        if (allKeysBuilder_ == null) {
          return allKeys_.get(index);
        } else {
          return allKeysBuilder_.getMessage(index);
        }
      }
      public Builder setAllKeys(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto value) {
        if (allKeysBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllKeysIsMutable();
          allKeys_.set(index, value);
          onChanged();
        } else {
          allKeysBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setAllKeys(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder builderForValue) {
        if (allKeysBuilder_ == null) {
          ensureAllKeysIsMutable();
          allKeys_.set(index, builderForValue.build());
          onChanged();
        } else {
          allKeysBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllKeys(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto value) {
        if (allKeysBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllKeysIsMutable();
          allKeys_.add(value);
          onChanged();
        } else {
          allKeysBuilder_.addMessage(value);
        }
        return this;
      }
      public Builder addAllKeys(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto value) {
        if (allKeysBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllKeysIsMutable();
          allKeys_.add(index, value);
          onChanged();
        } else {
          allKeysBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addAllKeys(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder builderForValue) {
        if (allKeysBuilder_ == null) {
          ensureAllKeysIsMutable();
          allKeys_.add(builderForValue.build());
          onChanged();
        } else {
          allKeysBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addAllKeys(
          int index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder builderForValue) {
        if (allKeysBuilder_ == null) {
          ensureAllKeysIsMutable();
          allKeys_.add(index, builderForValue.build());
          onChanged();
        } else {
          allKeysBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      public Builder addAllAllKeys(
          java.lang.Iterable<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto> values) {
        if (allKeysBuilder_ == null) {
          ensureAllKeysIsMutable();
          super.addAll(values, allKeys_);
          onChanged();
        } else {
          allKeysBuilder_.addAllMessages(values);
        }
        return this;
      }
      public Builder clearAllKeys() {
        if (allKeysBuilder_ == null) {
          allKeys_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
          onChanged();
        } else {
          allKeysBuilder_.clear();
        }
        return this;
      }
      public Builder removeAllKeys(int index) {
        if (allKeysBuilder_ == null) {
          ensureAllKeysIsMutable();
          allKeys_.remove(index);
          onChanged();
        } else {
          allKeysBuilder_.remove(index);
        }
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder getAllKeysBuilder(
          int index) {
        return getAllKeysFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder getAllKeysOrBuilder(
          int index) {
        if (allKeysBuilder_ == null) {
          return allKeys_.get(index);  } else {
          return allKeysBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder> 
           getAllKeysOrBuilderList() {
        if (allKeysBuilder_ != null) {
          return allKeysBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(allKeys_);
        }
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder addAllKeysBuilder() {
        return getAllKeysFieldBuilder().addBuilder(
            org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance());
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder addAllKeysBuilder(
          int index) {
        return getAllKeysFieldBuilder().addBuilder(
            index, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder> 
           getAllKeysBuilderList() {
        return getAllKeysFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder> 
          getAllKeysFieldBuilder() {
        if (allKeysBuilder_ == null) {
          allKeysBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProtoOrBuilder>(
                  allKeys_,
                  ((bitField0_ & 0x00000010) == 0x00000010),
                  getParentForChildren(),
                  isClean());
          allKeys_ = null;
        }
        return allKeysBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:ExportedBlockKeysProto)
    }
    
    static {
      defaultInstance = new ExportedBlockKeysProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:ExportedBlockKeysProto)
  }
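
  // A minimal, hand-written usage sketch (not emitted by protoc): shows how a
  // caller might assemble an ExportedBlockKeysProto with its Builder and
  // round-trip it through the wire format. The interval and lifetime values
  // are placeholders chosen for illustration, and the supplied key is assumed
  // to be a fully initialized BlockKeyProto, since build() rejects messages
  // with missing required fields.
  private static ExportedBlockKeysProto buildSampleExportedBlockKeys(
      BlockKeyProto currentKey)
      throws com.google.protobuf.InvalidProtocolBufferException {
    ExportedBlockKeysProto keys = ExportedBlockKeysProto.newBuilder()
        .setIsBlockTokenEnabled(true)     // required bool, field 1
        .setKeyUpdateInterval(600000L)    // required uint64, field 2 (assumed value)
        .setTokenLifeTime(600000L)        // required uint64, field 3 (assumed value)
        .setCurrentKey(currentKey)        // required .BlockKeyProto, field 4
        .addAllKeys(currentKey)           // repeated .BlockKeyProto, field 5
        .build();
    // Serialize and parse back to exercise parseFrom(ByteString).
    return ExportedBlockKeysProto.parseFrom(keys.toByteString());
  }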
  
  public interface RecoveringBlockProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required uint64 newGenStamp = 1;
    boolean hasNewGenStamp();
    long getNewGenStamp();
    
    // required .LocatedBlockProto block = 2;
    boolean hasBlock();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getBlock();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getBlockOrBuilder();
  }
  public static final class RecoveringBlockProto extends
      com.google.protobuf.GeneratedMessage
      implements RecoveringBlockProtoOrBuilder {
    // Use RecoveringBlockProto.newBuilder() to construct.
    private RecoveringBlockProto(Builder builder) {
      super(builder);
    }
    private RecoveringBlockProto(boolean noInit) {}
    
    private static final RecoveringBlockProto defaultInstance;
    public static RecoveringBlockProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public RecoveringBlockProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RecoveringBlockProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RecoveringBlockProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required uint64 newGenStamp = 1;
    public static final int NEWGENSTAMP_FIELD_NUMBER = 1;
    private long newGenStamp_;
    public boolean hasNewGenStamp() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getNewGenStamp() {
      return newGenStamp_;
    }
    
    // required .LocatedBlockProto block = 2;
    public static final int BLOCK_FIELD_NUMBER = 2;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto block_;
    public boolean hasBlock() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getBlock() {
      return block_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getBlockOrBuilder() {
      return block_;
    }
    
    private void initFields() {
      newGenStamp_ = 0L;
      block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasNewGenStamp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBlock()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getBlock().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, newGenStamp_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, block_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, newGenStamp_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, block_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto) obj;
      
      boolean result = true;
      result = result && (hasNewGenStamp() == other.hasNewGenStamp());
      if (hasNewGenStamp()) {
        result = result && (getNewGenStamp()
            == other.getNewGenStamp());
      }
      result = result && (hasBlock() == other.hasBlock());
      if (hasBlock()) {
        result = result && getBlock()
            .equals(other.getBlock());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNewGenStamp()) {
        hash = (37 * hash) + NEWGENSTAMP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNewGenStamp());
      }
      if (hasBlock()) {
        hash = (37 * hash) + BLOCK_FIELD_NUMBER;
        hash = (53 * hash) + getBlock().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RecoveringBlockProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_RecoveringBlockProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getBlockFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        newGenStamp_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (blockBuilder_ == null) {
          block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
        } else {
          blockBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.newGenStamp_ = newGenStamp_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (blockBuilder_ == null) {
          result.block_ = block_;
        } else {
          result.block_ = blockBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto.getDefaultInstance()) return this;
        if (other.hasNewGenStamp()) {
          setNewGenStamp(other.getNewGenStamp());
        }
        if (other.hasBlock()) {
          mergeBlock(other.getBlock());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasNewGenStamp()) {
          
          return false;
        }
        if (!hasBlock()) {
          
          return false;
        }
        if (!getBlock().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              newGenStamp_ = input.readUInt64();
              break;
            }
            case 18: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder();
              if (hasBlock()) {
                subBuilder.mergeFrom(getBlock());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setBlock(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required uint64 newGenStamp = 1;
      private long newGenStamp_ ;
      public boolean hasNewGenStamp() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getNewGenStamp() {
        return newGenStamp_;
      }
      public Builder setNewGenStamp(long value) {
        bitField0_ |= 0x00000001;
        newGenStamp_ = value;
        onChanged();
        return this;
      }
      public Builder clearNewGenStamp() {
        bitField0_ = (bitField0_ & ~0x00000001);
        newGenStamp_ = 0L;
        onChanged();
        return this;
      }
      
      // required .LocatedBlockProto block = 2;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder> blockBuilder_;
      public boolean hasBlock() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto getBlock() {
        if (blockBuilder_ == null) {
          return block_;
        } else {
          return blockBuilder_.getMessage();
        }
      }
      public Builder setBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
        if (blockBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          block_ = value;
          onChanged();
        } else {
          blockBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      public Builder setBlock(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder builderForValue) {
        if (blockBuilder_ == null) {
          block_ = builderForValue.build();
          onChanged();
        } else {
          blockBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      public Builder mergeBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
        if (blockBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              block_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance()) {
            block_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder(block_).mergeFrom(value).buildPartial();
          } else {
            block_ = value;
          }
          onChanged();
        } else {
          blockBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      public Builder clearBlock() {
        if (blockBuilder_ == null) {
          block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
          onChanged();
        } else {
          blockBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder getBlockBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getBlockFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder getBlockOrBuilder() {
        if (blockBuilder_ != null) {
          return blockBuilder_.getMessageOrBuilder();
        } else {
          return block_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder> 
          getBlockFieldBuilder() {
        if (blockBuilder_ == null) {
          blockBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProtoOrBuilder>(
                  block_,
                  getParentForChildren(),
                  isClean());
          block_ = null;
        }
        return blockBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:RecoveringBlockProto)
    }
    
    static {
      defaultInstance = new RecoveringBlockProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:RecoveringBlockProto)
  }
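
  // A minimal, hand-written usage sketch (not emitted by protoc): assembles a
  // RecoveringBlockProto from an existing, fully initialized
  // LocatedBlockProto. The generation-stamp value is an assumption chosen
  // purely for illustration.
  private static RecoveringBlockProto buildSampleRecoveringBlock(
      LocatedBlockProto block) {
    return RecoveringBlockProto.newBuilder()
        .setNewGenStamp(1001L)   // required uint64, field 1 (assumed value)
        .setBlock(block)         // required .LocatedBlockProto, field 2
        .build();
  }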
  
  public interface VersionRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  public static final class VersionRequestProto extends
      com.google.protobuf.GeneratedMessage
      implements VersionRequestProtoOrBuilder {
    // Use VersionRequestProto.newBuilder() to construct.
    private VersionRequestProto(Builder builder) {
      super(builder);
    }
    private VersionRequestProto(boolean noInit) {}
    
    private static final VersionRequestProto defaultInstance;
    public static VersionRequestProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public VersionRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionRequestProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionRequestProto_fieldAccessorTable;
    }
    
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto) obj;
      
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionRequestProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionRequestProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto(this);
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }
      
      
      // @@protoc_insertion_point(builder_scope:VersionRequestProto)
    }
    
    static {
      defaultInstance = new VersionRequestProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:VersionRequestProto)
  }
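  
  // Usage sketch (added for illustration; not part of the protoc output). VersionRequestProto
  // declares no fields, so a caller normally just builds the default instance and serializes it.
  // The helper name below is hypothetical; it relies only on the generated API above.
  private static VersionRequestProto exampleVersionRequestRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    VersionRequestProto request = VersionRequestProto.newBuilder().build();
    byte[] wire = request.toByteArray();           // serialize to the protobuf wire format (empty payload)
    return VersionRequestProto.parseFrom(wire);    // parse it back into an equivalent message
  }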
  
  public interface VersionResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .NamespaceInfoProto info = 1;
    boolean hasInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto getInfo();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProtoOrBuilder getInfoOrBuilder();
  }
  public static final class VersionResponseProto extends
      com.google.protobuf.GeneratedMessage
      implements VersionResponseProtoOrBuilder {
    // Use VersionResponseProto.newBuilder() to construct.
    private VersionResponseProto(Builder builder) {
      super(builder);
    }
    private VersionResponseProto(boolean noInit) {}
    
    private static final VersionResponseProto defaultInstance;
    public static VersionResponseProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public VersionResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionResponseProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionResponseProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required .NamespaceInfoProto info = 1;
    public static final int INFO_FIELD_NUMBER = 1;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto info_;
    public boolean hasInfo() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto getInfo() {
      return info_;
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProtoOrBuilder getInfoOrBuilder() {
      return info_;
    }
    
    private void initFields() {
      info_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasInfo()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getInfo().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, info_);
      }
      getUnknownFields().writeTo(output);
    }
    
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, info_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto) obj;
      
      boolean result = true;
      result = result && (hasInfo() == other.hasInfo());
      if (hasInfo()) {
        result = result && getInfo()
            .equals(other.getInfo());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasInfo()) {
        hash = (37 * hash) + INFO_FIELD_NUMBER;
        hash = (53 * hash) + getInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionResponseProto_descriptor;
      }
      
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_VersionResponseProto_fieldAccessorTable;
      }
      
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      
      public Builder clear() {
        super.clear();
        if (infoBuilder_ == null) {
          info_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDefaultInstance();
        } else {
          infoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto.getDescriptor();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto.getDefaultInstance();
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
      
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (infoBuilder_ == null) {
          result.info_ = info_;
        } else {
          result.info_ = infoBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto.getDefaultInstance()) return this;
        if (other.hasInfo()) {
          mergeInfo(other.getInfo());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      
      public final boolean isInitialized() {
        if (!hasInfo()) {
          
          return false;
        }
        if (!getInfo().isInitialized()) {
          
          return false;
        }
        return true;
      }
      
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.newBuilder();
              if (hasInfo()) {
                subBuilder.mergeFrom(getInfo());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setInfo(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
      
      private int bitField0_;
      
      // required .NamespaceInfoProto info = 1;
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto info_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProtoOrBuilder> infoBuilder_;
      public boolean hasInfo() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto getInfo() {
        if (infoBuilder_ == null) {
          return info_;
        } else {
          return infoBuilder_.getMessage();
        }
      }
      public Builder setInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto value) {
        if (infoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          info_ = value;
          onChanged();
        } else {
          infoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder setInfo(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.Builder builderForValue) {
        if (infoBuilder_ == null) {
          info_ = builderForValue.build();
          onChanged();
        } else {
          infoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder mergeInfo(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto value) {
        if (infoBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              info_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDefaultInstance()) {
            info_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.newBuilder(info_).mergeFrom(value).buildPartial();
          } else {
            info_ = value;
          }
          onChanged();
        } else {
          infoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearInfo() {
        if (infoBuilder_ == null) {
          info_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.getDefaultInstance();
          onChanged();
        } else {
          infoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.Builder getInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getInfoFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProtoOrBuilder getInfoOrBuilder() {
        if (infoBuilder_ != null) {
          return infoBuilder_.getMessageOrBuilder();
        } else {
          return info_;
        }
      }
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProtoOrBuilder> 
          getInfoFieldBuilder() {
        if (infoBuilder_ == null) {
          infoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProtoOrBuilder>(
                  info_,
                  getParentForChildren(),
                  isClean());
          info_ = null;
        }
        return infoBuilder_;
      }
      
      // @@protoc_insertion_point(builder_scope:VersionResponseProto)
    }
    
    static {
      defaultInstance = new VersionResponseProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:VersionResponseProto)
  }
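  
  // Usage sketch (added for illustration; not part of the protoc output). VersionResponseProto has a
  // single required field, "info", so build() throws if it is left unset; a caller would typically
  // populate it from an existing NamespaceInfoProto. The helper name is hypothetical and assumes the
  // caller supplies a fully initialized NamespaceInfoProto.
  private static VersionResponseProto exampleVersionResponse(NamespaceInfoProto info) {
    VersionResponseProto.Builder builder = VersionResponseProto.newBuilder()
        .setInfo(info);                  // sets the required field and flags bit 0 of bitField0_
    // isInitialized() also checks the nested NamespaceInfoProto's own required fields.
    if (!builder.isInitialized()) {
      throw new IllegalArgumentException("info is missing required fields");
    }
    return builder.build();
  }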
  
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ExtendedBlockProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ExtendedBlockProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BlockTokenIdentifierProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BlockTokenIdentifierProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DatanodeIDProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DatanodeIDProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DatanodeInfosProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DatanodeInfosProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DatanodeInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DatanodeInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ContentSummaryProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ContentSummaryProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CorruptFileBlocksProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CorruptFileBlocksProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_FsPermissionProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_FsPermissionProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_LocatedBlockProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_LocatedBlockProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DataEncryptionKeyProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DataEncryptionKeyProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_LocatedBlocksProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_LocatedBlocksProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_HdfsFileStatusProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_HdfsFileStatusProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_FsServerDefaultsProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_FsServerDefaultsProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DirectoryListingProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DirectoryListingProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_UpgradeStatusReportProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_UpgradeStatusReportProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_StorageInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_StorageInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_NamenodeRegistrationProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_NamenodeRegistrationProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CheckpointSignatureProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CheckpointSignatureProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_NamenodeCommandProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_NamenodeCommandProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CheckpointCommandProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CheckpointCommandProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BlockProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BlockProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BlockWithLocationsProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BlockWithLocationsProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BlocksWithLocationsProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BlocksWithLocationsProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RemoteEditLogProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RemoteEditLogProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RemoteEditLogManifestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RemoteEditLogManifestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_NamespaceInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_NamespaceInfoProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BlockKeyProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BlockKeyProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ExportedBlockKeysProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ExportedBlockKeysProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RecoveringBlockProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RecoveringBlockProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_VersionRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_VersionRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_VersionResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_VersionResponseProto_fieldAccessorTable;
  
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\nhdfs.proto\"c\n\022ExtendedBlockProto\022\016\n\006po" +
      "olId\030\001 \002(\t\022\017\n\007blockId\030\002 \002(\004\022\027\n\017generatio" +
      "nStamp\030\003 \002(\004\022\023\n\010numBytes\030\004 \001(\004:\0010\"`\n\031Blo" +
      "ckTokenIdentifierProto\022\022\n\nidentifier\030\001 \002" +
      "(\014\022\020\n\010password\030\002 \002(\014\022\014\n\004kind\030\003 \002(\t\022\017\n\007se" +
      "rvice\030\004 \002(\t\"{\n\017DatanodeIDProto\022\016\n\006ipAddr" +
      "\030\001 \002(\t\022\020\n\010hostName\030\002 \002(\t\022\021\n\tstorageID\030\003 " +
      "\002(\t\022\020\n\010xferPort\030\004 \002(\r\022\020\n\010infoPort\030\005 \002(\r\022" +
      "\017\n\007ipcPort\030\006 \002(\r\";\n\022DatanodeInfosProto\022%" +
      "\n\tdatanodes\030\001 \003(\0132\022.DatanodeInfoProto\"\322\002",
      "\n\021DatanodeInfoProto\022\034\n\002id\030\001 \002(\0132\020.Datano" +
      "deIDProto\022\023\n\010capacity\030\002 \001(\004:\0010\022\022\n\007dfsUse" +
      "d\030\003 \001(\004:\0010\022\024\n\tremaining\030\004 \001(\004:\0010\022\030\n\rbloc" +
      "kPoolUsed\030\005 \001(\004:\0010\022\025\n\nlastUpdate\030\006 \001(\004:\001" +
      "0\022\027\n\014xceiverCount\030\007 \001(\r:\0010\022\020\n\010location\030\010" +
      " \001(\t\0229\n\nadminState\030\n \001(\0162\035.DatanodeInfoP" +
      "roto.AdminState:\006NORMAL\"I\n\nAdminState\022\n\n" +
      "\006NORMAL\020\000\022\033\n\027DECOMMISSION_INPROGRESS\020\001\022\022" +
      "\n\016DECOMMISSIONED\020\002\"\212\001\n\023ContentSummaryPro" +
      "to\022\016\n\006length\030\001 \002(\004\022\021\n\tfileCount\030\002 \002(\004\022\026\n",
      "\016directoryCount\030\003 \002(\004\022\r\n\005quota\030\004 \002(\004\022\025\n\r" +
      "spaceConsumed\030\005 \002(\004\022\022\n\nspaceQuota\030\006 \002(\004\"" +
      "7\n\026CorruptFileBlocksProto\022\r\n\005files\030\001 \003(\t" +
      "\022\016\n\006cookie\030\002 \002(\t\"!\n\021FsPermissionProto\022\014\n" +
      "\004perm\030\001 \002(\r\"\246\001\n\021LocatedBlockProto\022\036\n\001b\030\001" +
      " \002(\0132\023.ExtendedBlockProto\022\016\n\006offset\030\002 \002(" +
      "\004\022 \n\004locs\030\003 \003(\0132\022.DatanodeInfoProto\022\017\n\007c" +
      "orrupt\030\004 \002(\010\022.\n\nblockToken\030\005 \002(\0132\032.Block" +
      "TokenIdentifierProto\"\223\001\n\026DataEncryptionK" +
      "eyProto\022\r\n\005keyId\030\001 \002(\r\022\023\n\013blockPoolId\030\002 ",
      "\002(\t\022\r\n\005nonce\030\003 \002(\014\022\025\n\rencryptionKey\030\004 \002(" +
      "\014\022\022\n\nexpiryDate\030\005 \002(\004\022\033\n\023encryptionAlgor" +
      "ithm\030\006 \001(\t\"\253\001\n\022LocatedBlocksProto\022\022\n\nfil" +
      "eLength\030\001 \002(\004\022\"\n\006blocks\030\002 \003(\0132\022.LocatedB" +
      "lockProto\022\031\n\021underConstruction\030\003 \002(\010\022%\n\t" +
      "lastBlock\030\004 \001(\0132\022.LocatedBlockProto\022\033\n\023i" +
      "sLastBlockComplete\030\005 \002(\010\"\374\002\n\023HdfsFileSta" +
      "tusProto\022/\n\010fileType\030\001 \002(\0162\035.HdfsFileSta" +
      "tusProto.FileType\022\014\n\004path\030\002 \002(\014\022\016\n\006lengt" +
      "h\030\003 \002(\004\022&\n\npermission\030\004 \002(\0132\022.FsPermissi",
      "onProto\022\r\n\005owner\030\005 \002(\t\022\r\n\005group\030\006 \002(\t\022\031\n" +
      "\021modification_time\030\007 \002(\004\022\023\n\013access_time\030" +
      "\010 \002(\004\022\017\n\007symlink\030\t \001(\014\022\034\n\021block_replicat" +
      "ion\030\n \001(\r:\0010\022\024\n\tblocksize\030\013 \001(\004:\0010\022&\n\tlo" +
      "cations\030\014 \001(\0132\023.LocatedBlocksProto\"3\n\010Fi" +
      "leType\022\n\n\006IS_DIR\020\001\022\013\n\007IS_FILE\020\002\022\016\n\nIS_SY" +
      "MLINK\020\003\"\202\002\n\025FsServerDefaultsProto\022\021\n\tblo" +
      "ckSize\030\001 \002(\004\022\030\n\020bytesPerChecksum\030\002 \002(\r\022\027" +
      "\n\017writePacketSize\030\003 \002(\r\022\023\n\013replication\030\004" +
      " \002(\r\022\026\n\016fileBufferSize\030\005 \002(\r\022\"\n\023encryptD",
      "ataTransfer\030\006 \001(\010:\005false\022\030\n\rtrashInterva" +
      "l\030\007 \001(\004:\0010\0228\n\014checksumType\030\010 \001(\0162\022.Check" +
      "sumTypeProto:\016CHECKSUM_CRC32\"_\n\025Director" +
      "yListingProto\022,\n\016partialListing\030\001 \003(\0132\024." +
      "HdfsFileStatusProto\022\030\n\020remainingEntries\030" +
      "\002 \002(\r\"U\n\030UpgradeStatusReportProto\022\017\n\007ver" +
      "sion\030\001 \002(\r\022\025\n\rupgradeStatus\030\002 \002(\r\022\021\n\tfin" +
      "alized\030\003 \002(\010\"_\n\020StorageInfoProto\022\025\n\rlayo" +
      "utVersion\030\001 \002(\r\022\022\n\nnamespceID\030\002 \002(\r\022\021\n\tc" +
      "lusterID\030\003 \002(\t\022\r\n\005cTime\030\004 \002(\004\"\361\001\n\031Nameno",
      "deRegistrationProto\022\022\n\nrpcAddress\030\001 \002(\t\022" +
      "\023\n\013httpAddress\030\002 \002(\t\022&\n\013storageInfo\030\003 \002(" +
      "\0132\021.StorageInfoProto\022D\n\004role\030\004 \001(\0162,.Nam" +
      "enodeRegistrationProto.NamenodeRoleProto" +
      ":\010NAMENODE\"=\n\021NamenodeRoleProto\022\014\n\010NAMEN" +
      "ODE\020\001\022\n\n\006BACKUP\020\002\022\016\n\nCHECKPOINT\020\003\"\221\001\n\030Ch" +
      "eckpointSignatureProto\022\023\n\013blockPoolId\030\001 " +
      "\002(\t\022 \n\030mostRecentCheckpointTxId\030\002 \002(\004\022\026\n" +
      "\016curSegmentTxId\030\003 \002(\004\022&\n\013storageInfo\030\004 \002" +
      "(\0132\021.StorageInfoProto\"\264\001\n\024NamenodeComman",
      "dProto\022\016\n\006action\030\001 \002(\r\022(\n\004type\030\002 \002(\0162\032.N" +
      "amenodeCommandProto.Type\022.\n\rcheckpointCm" +
      "d\030\003 \001(\0132\027.CheckpointCommandProto\"2\n\004Type" +
      "\022\023\n\017NamenodeCommand\020\000\022\025\n\021CheckPointComma" +
      "nd\020\001\"a\n\026CheckpointCommandProto\022,\n\tsignat" +
      "ure\030\001 \002(\0132\031.CheckpointSignatureProto\022\031\n\021" +
      "needToReturnImage\030\002 \002(\010\"D\n\nBlockProto\022\017\n" +
      "\007blockId\030\001 \002(\004\022\020\n\010genStamp\030\002 \002(\004\022\023\n\010numB" +
      "ytes\030\003 \001(\004:\0010\"I\n\027BlockWithLocationsProto" +
      "\022\032\n\005block\030\001 \002(\0132\013.BlockProto\022\022\n\nstorageI",
      "Ds\030\002 \003(\t\"D\n\030BlocksWithLocationsProto\022(\n\006" +
      "blocks\030\001 \003(\0132\030.BlockWithLocationsProto\"U" +
      "\n\022RemoteEditLogProto\022\021\n\tstartTxId\030\001 \002(\004\022" +
      "\017\n\007endTxId\030\002 \002(\004\022\033\n\014isInProgress\030\003 \001(\010:\005" +
      "false\"?\n\032RemoteEditLogManifestProto\022!\n\004l" +
      "ogs\030\001 \003(\0132\023.RemoteEditLogProto\"\234\001\n\022Names" +
      "paceInfoProto\022\024\n\014buildVersion\030\001 \002(\t\022\032\n\022d" +
      "istUpgradeVersion\030\002 \002(\r\022\023\n\013blockPoolID\030\003" +
      " \002(\t\022&\n\013storageInfo\030\004 \002(\0132\021.StorageInfoP" +
      "roto\022\027\n\017softwareVersion\030\005 \002(\t\"D\n\rBlockKe",
      "yProto\022\r\n\005keyId\030\001 \002(\r\022\022\n\nexpiryDate\030\002 \002(" +
      "\004\022\020\n\010keyBytes\030\003 \001(\014\"\254\001\n\026ExportedBlockKey" +
      "sProto\022\033\n\023isBlockTokenEnabled\030\001 \002(\010\022\031\n\021k" +
      "eyUpdateInterval\030\002 \002(\004\022\025\n\rtokenLifeTime\030" +
      "\003 \002(\004\022\"\n\ncurrentKey\030\004 \002(\0132\016.BlockKeyProt" +
      "o\022\037\n\007allKeys\030\005 \003(\0132\016.BlockKeyProto\"N\n\024Re" +
      "coveringBlockProto\022\023\n\013newGenStamp\030\001 \002(\004\022" +
      "!\n\005block\030\002 \002(\0132\022.LocatedBlockProto\"\025\n\023Ve" +
      "rsionRequestProto\"9\n\024VersionResponseProt" +
      "o\022!\n\004info\030\001 \002(\0132\023.NamespaceInfoProto*O\n\021",
      "ChecksumTypeProto\022\021\n\rCHECKSUM_NULL\020\000\022\022\n\016" +
      "CHECKSUM_CRC32\020\001\022\023\n\017CHECKSUM_CRC32C\020\002*L\n" +
      "\021ReplicaStateProto\022\r\n\tFINALIZED\020\000\022\007\n\003RBW" +
      "\020\001\022\007\n\003RWR\020\002\022\007\n\003RUR\020\003\022\r\n\tTEMPORARY\020\004B6\n%o" +
      "rg.apache.hadoop.hdfs.protocol.protoB\nHd" +
      "fsProtos\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_ExtendedBlockProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_ExtendedBlockProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ExtendedBlockProto_descriptor,
              new java.lang.String[] { "PoolId", "BlockId", "GenerationStamp", "NumBytes", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder.class);
          internal_static_BlockTokenIdentifierProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_BlockTokenIdentifierProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BlockTokenIdentifierProto_descriptor,
              new java.lang.String[] { "Identifier", "Password", "Kind", "Service", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder.class);
          internal_static_DatanodeIDProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_DatanodeIDProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_DatanodeIDProto_descriptor,
              new java.lang.String[] { "IpAddr", "HostName", "StorageID", "XferPort", "InfoPort", "IpcPort", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder.class);
          internal_static_DatanodeInfosProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_DatanodeInfosProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_DatanodeInfosProto_descriptor,
              new java.lang.String[] { "Datanodes", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto.Builder.class);
          internal_static_DatanodeInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_DatanodeInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_DatanodeInfoProto_descriptor,
              new java.lang.String[] { "Id", "Capacity", "DfsUsed", "Remaining", "BlockPoolUsed", "LastUpdate", "XceiverCount", "Location", "AdminState", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder.class);
          internal_static_ContentSummaryProto_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_ContentSummaryProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ContentSummaryProto_descriptor,
              new java.lang.String[] { "Length", "FileCount", "DirectoryCount", "Quota", "SpaceConsumed", "SpaceQuota", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto.Builder.class);
          internal_static_CorruptFileBlocksProto_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_CorruptFileBlocksProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CorruptFileBlocksProto_descriptor,
              new java.lang.String[] { "Files", "Cookie", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto.Builder.class);
          internal_static_FsPermissionProto_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_FsPermissionProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_FsPermissionProto_descriptor,
              new java.lang.String[] { "Perm", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto.Builder.class);
          internal_static_LocatedBlockProto_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_LocatedBlockProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_LocatedBlockProto_descriptor,
              new java.lang.String[] { "B", "Offset", "Locs", "Corrupt", "BlockToken", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder.class);
          internal_static_DataEncryptionKeyProto_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_DataEncryptionKeyProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_DataEncryptionKeyProto_descriptor,
              new java.lang.String[] { "KeyId", "BlockPoolId", "Nonce", "EncryptionKey", "ExpiryDate", "EncryptionAlgorithm", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto.Builder.class);
          internal_static_LocatedBlocksProto_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_LocatedBlocksProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_LocatedBlocksProto_descriptor,
              new java.lang.String[] { "FileLength", "Blocks", "UnderConstruction", "LastBlock", "IsLastBlockComplete", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto.Builder.class);
          internal_static_HdfsFileStatusProto_descriptor =
            getDescriptor().getMessageTypes().get(11);
          internal_static_HdfsFileStatusProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_HdfsFileStatusProto_descriptor,
              new java.lang.String[] { "FileType", "Path", "Length", "Permission", "Owner", "Group", "ModificationTime", "AccessTime", "Symlink", "BlockReplication", "Blocksize", "Locations", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.Builder.class);
          internal_static_FsServerDefaultsProto_descriptor =
            getDescriptor().getMessageTypes().get(12);
          internal_static_FsServerDefaultsProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_FsServerDefaultsProto_descriptor,
              new java.lang.String[] { "BlockSize", "BytesPerChecksum", "WritePacketSize", "Replication", "FileBufferSize", "EncryptDataTransfer", "TrashInterval", "ChecksumType", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto.Builder.class);
          internal_static_DirectoryListingProto_descriptor =
            getDescriptor().getMessageTypes().get(13);
          internal_static_DirectoryListingProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_DirectoryListingProto_descriptor,
              new java.lang.String[] { "PartialListing", "RemainingEntries", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto.Builder.class);
          internal_static_UpgradeStatusReportProto_descriptor =
            getDescriptor().getMessageTypes().get(14);
          internal_static_UpgradeStatusReportProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_UpgradeStatusReportProto_descriptor,
              new java.lang.String[] { "Version", "UpgradeStatus", "Finalized", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto.Builder.class);
          internal_static_StorageInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(15);
          internal_static_StorageInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_StorageInfoProto_descriptor,
              new java.lang.String[] { "LayoutVersion", "NamespceID", "ClusterID", "CTime", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto.Builder.class);
          internal_static_NamenodeRegistrationProto_descriptor =
            getDescriptor().getMessageTypes().get(16);
          internal_static_NamenodeRegistrationProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_NamenodeRegistrationProto_descriptor,
              new java.lang.String[] { "RpcAddress", "HttpAddress", "StorageInfo", "Role", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.Builder.class);
          internal_static_CheckpointSignatureProto_descriptor =
            getDescriptor().getMessageTypes().get(17);
          internal_static_CheckpointSignatureProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CheckpointSignatureProto_descriptor,
              new java.lang.String[] { "BlockPoolId", "MostRecentCheckpointTxId", "CurSegmentTxId", "StorageInfo", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto.Builder.class);
          internal_static_NamenodeCommandProto_descriptor =
            getDescriptor().getMessageTypes().get(18);
          internal_static_NamenodeCommandProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_NamenodeCommandProto_descriptor,
              new java.lang.String[] { "Action", "Type", "CheckpointCmd", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto.Builder.class);
          internal_static_CheckpointCommandProto_descriptor =
            getDescriptor().getMessageTypes().get(19);
          internal_static_CheckpointCommandProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CheckpointCommandProto_descriptor,
              new java.lang.String[] { "Signature", "NeedToReturnImage", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto.Builder.class);
          internal_static_BlockProto_descriptor =
            getDescriptor().getMessageTypes().get(20);
          internal_static_BlockProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BlockProto_descriptor,
              new java.lang.String[] { "BlockId", "GenStamp", "NumBytes", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder.class);
          internal_static_BlockWithLocationsProto_descriptor =
            getDescriptor().getMessageTypes().get(21);
          internal_static_BlockWithLocationsProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BlockWithLocationsProto_descriptor,
              new java.lang.String[] { "Block", "StorageIDs", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto.Builder.class);
          internal_static_BlocksWithLocationsProto_descriptor =
            getDescriptor().getMessageTypes().get(22);
          internal_static_BlocksWithLocationsProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BlocksWithLocationsProto_descriptor,
              new java.lang.String[] { "Blocks", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto.Builder.class);
          internal_static_RemoteEditLogProto_descriptor =
            getDescriptor().getMessageTypes().get(23);
          internal_static_RemoteEditLogProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RemoteEditLogProto_descriptor,
              new java.lang.String[] { "StartTxId", "EndTxId", "IsInProgress", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto.Builder.class);
          internal_static_RemoteEditLogManifestProto_descriptor =
            getDescriptor().getMessageTypes().get(24);
          internal_static_RemoteEditLogManifestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RemoteEditLogManifestProto_descriptor,
              new java.lang.String[] { "Logs", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto.Builder.class);
          internal_static_NamespaceInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(25);
          internal_static_NamespaceInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_NamespaceInfoProto_descriptor,
              new java.lang.String[] { "BuildVersion", "DistUpgradeVersion", "BlockPoolID", "StorageInfo", "SoftwareVersion", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto.Builder.class);
          internal_static_BlockKeyProto_descriptor =
            getDescriptor().getMessageTypes().get(26);
          internal_static_BlockKeyProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BlockKeyProto_descriptor,
              new java.lang.String[] { "KeyId", "ExpiryDate", "KeyBytes", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto.Builder.class);
          internal_static_ExportedBlockKeysProto_descriptor =
            getDescriptor().getMessageTypes().get(27);
          internal_static_ExportedBlockKeysProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ExportedBlockKeysProto_descriptor,
              new java.lang.String[] { "IsBlockTokenEnabled", "KeyUpdateInterval", "TokenLifeTime", "CurrentKey", "AllKeys", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.Builder.class);
          internal_static_RecoveringBlockProto_descriptor =
            getDescriptor().getMessageTypes().get(28);
          internal_static_RecoveringBlockProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RecoveringBlockProto_descriptor,
              new java.lang.String[] { "NewGenStamp", "Block", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto.Builder.class);
          internal_static_VersionRequestProto_descriptor =
            getDescriptor().getMessageTypes().get(29);
          internal_static_VersionRequestProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_VersionRequestProto_descriptor,
              new java.lang.String[] { },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionRequestProto.Builder.class);
          internal_static_VersionResponseProto_descriptor =
            getDescriptor().getMessageTypes().get(30);
          internal_static_VersionResponseProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_VersionResponseProto_descriptor,
              new java.lang.String[] { "Info", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.VersionResponseProto.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
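  
  // Usage sketch (added for illustration; not part of the protoc output). The FileDescriptor built
  // in the static block above can be inspected reflectively; for example, a message descriptor can
  // be looked up by name instead of through the per-message getDescriptor() methods.
  private static com.google.protobuf.Descriptors.Descriptor exampleLookupDescriptor() {
    // Returns null if hdfs.proto defines no message with that name.
    return getDescriptor().findMessageTypeByName("VersionResponseProto");
  }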
  
  // @@protoc_insertion_point(outer_class_scope)
}