
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos
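A minimal usage sketch (illustrative only, not part of the generated file below): client code would typically assemble a SecureBulkLoadHFilesRequest through the generated builder roughly as follows. The FamilyPath and DelegationToken values here are placeholders; real callers populate them from the ClientProtos messages before sending the request.

    // Illustrative sketch only -- placeholder values, assuming the generated
    // ClientProtos and SecureBulkLoadProtos classes are on the classpath.
    ClientProtos.BulkLoadHFileRequest.FamilyPath familyPath =
        ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); // real code sets family/path
    SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request =
        SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder()
            .addFamilyPath(familyPath)                                       // repeated family_path = 1
            .setAssignSeqNum(true)                                           // optional assign_seq_num = 2
            .setFsToken(ClientProtos.DelegationToken.getDefaultInstance())   // required fs_token = 3
            .setBulkToken("placeholder-bulk-token")                          // required bulk_token = 4
            .build();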

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: SecureBulkLoad.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class SecureBulkLoadProtos {
  private SecureBulkLoadProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface SecureBulkLoadHFilesRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>
        getFamilyPathList();
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    int getFamilyPathCount();
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
        getFamilyPathOrBuilderList();
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
        int index);

    // optional bool assign_seq_num = 2;
    /**
     * optional bool assign_seq_num = 2;
     */
    boolean hasAssignSeqNum();
    /**
     * optional bool assign_seq_num = 2;
     */
    boolean getAssignSeqNum();

    // required .hbase.pb.DelegationToken fs_token = 3;
    /**
     * required .hbase.pb.DelegationToken fs_token = 3;
     */
    boolean hasFsToken();
    /**
     * required .hbase.pb.DelegationToken fs_token = 3;
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken();
    /**
     * required .hbase.pb.DelegationToken fs_token = 3;
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder();

    // required string bulk_token = 4;
    /**
     * required string bulk_token = 4;
     */
    boolean hasBulkToken();
    /**
     * required string bulk_token = 4;
     */
    java.lang.String getBulkToken();
    /**
     * required string bulk_token = 4;
     */
    com.google.protobuf.ByteString
        getBulkTokenBytes();
  }
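  // Minimal sketch (illustrative assumption, not generated code): because both the message
  // and its Builder implement SecureBulkLoadHFilesRequestOrBuilder, read-only helpers can
  // accept either one, e.g.
  //   static boolean seqNumRequested(SecureBulkLoadHFilesRequestOrBuilder r) {
  //     return r.hasAssignSeqNum() && r.getAssignSeqNum();
  //   }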
  /**
   * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesRequest}
   */
  public static final class SecureBulkLoadHFilesRequest extends
      com.google.protobuf.GeneratedMessage
      implements SecureBulkLoadHFilesRequestOrBuilder {
    // Use SecureBulkLoadHFilesRequest.newBuilder() to construct.
    private SecureBulkLoadHFilesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private SecureBulkLoadHFilesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final SecureBulkLoadHFilesRequest defaultInstance;
    public static SecureBulkLoadHFilesRequest getDefaultInstance() {
      return defaultInstance;
    }

    public SecureBulkLoadHFilesRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private SecureBulkLoadHFilesRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>();
                mutable_bitField0_ |= 0x00000001;
              }
              familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry));
              break;
            }
            case 16: {
              bitField0_ |= 0x00000001;
              assignSeqNum_ = input.readBool();
              break;
            }
            case 26: {
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = fsToken_.toBuilder();
              }
              fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(fsToken_);
                fsToken_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 34: {
              bitField0_ |= 0x00000004;
              bulkToken_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<SecureBulkLoadHFilesRequest> PARSER =
        new com.google.protobuf.AbstractParser<SecureBulkLoadHFilesRequest>() {
      public SecureBulkLoadHFilesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SecureBulkLoadHFilesRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SecureBulkLoadHFilesRequest> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
    public static final int FAMILY_PATH_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_;
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
      return familyPath_;
    }
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
        getFamilyPathOrBuilderList() {
      return familyPath_;
    }
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    public int getFamilyPathCount() {
      return familyPath_.size();
    }
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
      return familyPath_.get(index);
    }
    /**
     * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
        int index) {
      return familyPath_.get(index);
    }

    // optional bool assign_seq_num = 2;
    public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 2;
    private boolean assignSeqNum_;
    /**
     * optional bool assign_seq_num = 2;
     */
    public boolean hasAssignSeqNum() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * optional bool assign_seq_num = 2;
     */
    public boolean getAssignSeqNum() {
      return assignSeqNum_;
    }

    // required .hbase.pb.DelegationToken fs_token = 3;
    public static final int FS_TOKEN_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_;
    /**
     * required .hbase.pb.DelegationToken fs_token = 3;
     */
    public boolean hasFsToken() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * required .hbase.pb.DelegationToken fs_token = 3;
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
      return fsToken_;
    }
    /**
     * required .hbase.pb.DelegationToken fs_token = 3;
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
      return fsToken_;
    }

    // required string bulk_token = 4;
    public static final int BULK_TOKEN_FIELD_NUMBER = 4;
    private java.lang.Object bulkToken_;
    /**
     * required string bulk_token = 4;
     */
    public boolean hasBulkToken() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * required string bulk_token = 4;
     */
    public java.lang.String getBulkToken() {
      java.lang.Object ref = bulkToken_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          bulkToken_ = s;
        }
        return s;
      }
    }
    /**
     * required string bulk_token = 4;
     */
    public com.google.protobuf.ByteString
        getBulkTokenBytes() {
      java.lang.Object ref = bulkToken_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        bulkToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      familyPath_ = java.util.Collections.emptyList();
      assignSeqNum_ = false;
      fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
      bulkToken_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasFsToken()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBulkToken()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getFamilyPathCount(); i++) {
        if (!getFamilyPath(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < familyPath_.size(); i++) {
        output.writeMessage(1, familyPath_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(2, assignSeqNum_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(3, fsToken_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(4, getBulkTokenBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < familyPath_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, familyPath_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, assignSeqNum_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, fsToken_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getBulkTokenBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) obj;

      boolean result = true;
      result = result && getFamilyPathList()
          .equals(other.getFamilyPathList());
      result = result && (hasAssignSeqNum() == other.hasAssignSeqNum());
      if (hasAssignSeqNum()) {
        result = result && (getAssignSeqNum()
            == other.getAssignSeqNum());
      }
      result = result && (hasFsToken() == other.hasFsToken());
      if (hasFsToken()) {
        result = result && getFsToken()
            .equals(other.getFsToken());
      }
      result = result && (hasBulkToken() == other.hasBulkToken());
      if (hasBulkToken()) {
        result = result && getBulkToken()
            .equals(other.getBulkToken());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getFamilyPathCount() > 0) {
        hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER;
        hash = (53 * hash) + getFamilyPathList().hashCode();
      }
      if (hasAssignSeqNum()) {
        hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getAssignSeqNum());
      }
      if (hasFsToken()) {
        hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getFsToken().hashCode();
      }
      if (hasBulkToken()) {
        hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getBulkToken().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesRequest}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getFamilyPathFieldBuilder();
          getFsTokenFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (familyPathBuilder_ == null) {
          familyPath_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          familyPathBuilder_.clear();
        }
        assignSeqNum_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (fsTokenBuilder_ == null) {
          fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
        } else {
          fsTokenBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        bulkToken_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (familyPathBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.familyPath_ = familyPath_;
        } else {
          result.familyPath_ = familyPathBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.assignSeqNum_ = assignSeqNum_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000002;
        }
        if (fsTokenBuilder_ == null) {
          result.fsToken_ = fsToken_;
        } else {
          result.fsToken_ = fsTokenBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000004;
        }
        result.bulkToken_ = bulkToken_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance()) return this;
        if (familyPathBuilder_ == null) {
          if (!other.familyPath_.isEmpty()) {
            if (familyPath_.isEmpty()) {
              familyPath_ = other.familyPath_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureFamilyPathIsMutable();
              familyPath_.addAll(other.familyPath_);
            }
            onChanged();
          }
        } else {
          if (!other.familyPath_.isEmpty()) {
            if (familyPathBuilder_.isEmpty()) {
              familyPathBuilder_.dispose();
              familyPathBuilder_ = null;
              familyPath_ = other.familyPath_;
              bitField0_ = (bitField0_ & ~0x00000001);
              familyPathBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getFamilyPathFieldBuilder() : null;
            } else {
              familyPathBuilder_.addAllMessages(other.familyPath_);
            }
          }
        }
        if (other.hasAssignSeqNum()) {
          setAssignSeqNum(other.getAssignSeqNum());
        }
        if (other.hasFsToken()) {
          mergeFsToken(other.getFsToken());
        }
        if (other.hasBulkToken()) {
          bitField0_ |= 0x00000008;
          bulkToken_ = other.bulkToken_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasFsToken()) {
          
          return false;
        }
        if (!hasBulkToken()) {
          
          return false;
        }
        for (int i = 0; i < getFamilyPathCount(); i++) {
          if (!getFamilyPath(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ =
        java.util.Collections.emptyList();
      private void ensureFamilyPathIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPath_);
          bitField0_ |= 0x00000001;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_;

      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
        if (familyPathBuilder_ == null) {
          return java.util.Collections.unmodifiableList(familyPath_);
        } else {
          return familyPathBuilder_.getMessageList();
        }
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public int getFamilyPathCount() {
        if (familyPathBuilder_ == null) {
          return familyPath_.size();
        } else {
          return familyPathBuilder_.getCount();
        }
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
        if (familyPathBuilder_ == null) {
          return familyPath_.get(index);
        } else {
          return familyPathBuilder_.getMessage(index);
        }
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder setFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
        if (familyPathBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFamilyPathIsMutable();
          familyPath_.set(index, value);
          onChanged();
        } else {
          familyPathBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder setFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.set(index, builderForValue.build());
          onChanged();
        } else {
          familyPathBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
        if (familyPathBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFamilyPathIsMutable();
          familyPath_.add(value);
          onChanged();
        } else {
          familyPathBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder addFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
        if (familyPathBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFamilyPathIsMutable();
          familyPath_.add(index, value);
          onChanged();
        } else {
          familyPathBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder addFamilyPath(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.add(builderForValue.build());
          onChanged();
        } else {
          familyPathBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder addFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.add(index, builderForValue.build());
          onChanged();
        } else {
          familyPathBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder addAllFamilyPath(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          super.addAll(values, familyPath_);
          onChanged();
        } else {
          familyPathBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder clearFamilyPath() {
        if (familyPathBuilder_ == null) {
          familyPath_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          familyPathBuilder_.clear();
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public Builder removeFamilyPath(int index) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.remove(index);
          onChanged();
        } else {
          familyPathBuilder_.remove(index);
        }
        return this;
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder(
          int index) {
        return getFamilyPathFieldBuilder().getBuilder(index);
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
          int index) {
        if (familyPathBuilder_ == null) {
          return familyPath_.get(index);  } else {
          return familyPathBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
           getFamilyPathOrBuilderList() {
        if (familyPathBuilder_ != null) {
          return familyPathBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(familyPath_);
        }
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() {
        return getFamilyPathFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder(
          int index) {
        return getFamilyPathFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
      }
      /**
       * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder>
           getFamilyPathBuilderList() {
        return getFamilyPathFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
          getFamilyPathFieldBuilder() {
        if (familyPathBuilder_ == null) {
          familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>(
                  familyPath_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          familyPath_ = null;
        }
        return familyPathBuilder_;
      }

      // optional bool assign_seq_num = 2;
      private boolean assignSeqNum_ ;
      /**
       * optional bool assign_seq_num = 2;
       */
      public boolean hasAssignSeqNum() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * optional bool assign_seq_num = 2;
       */
      public boolean getAssignSeqNum() {
        return assignSeqNum_;
      }
      /**
       * optional bool assign_seq_num = 2;
       */
      public Builder setAssignSeqNum(boolean value) {
        bitField0_ |= 0x00000002;
        assignSeqNum_ = value;
        onChanged();
        return this;
      }
      /**
       * optional bool assign_seq_num = 2;
       */
      public Builder clearAssignSeqNum() {
        bitField0_ = (bitField0_ & ~0x00000002);
        assignSeqNum_ = false;
        onChanged();
        return this;
      }

      // required .hbase.pb.DelegationToken fs_token = 3;
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_;
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public boolean hasFsToken() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
        if (fsTokenBuilder_ == null) {
          return fsToken_;
        } else {
          return fsTokenBuilder_.getMessage();
        }
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
        if (fsTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          fsToken_ = value;
          onChanged();
        } else {
          fsTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public Builder setFsToken(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) {
        if (fsTokenBuilder_ == null) {
          fsToken_ = builderForValue.build();
          onChanged();
        } else {
          fsTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
        if (fsTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) {
            fsToken_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial();
          } else {
            fsToken_ = value;
          }
          onChanged();
        } else {
          fsTokenBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public Builder clearFsToken() {
        if (fsTokenBuilder_ == null) {
          fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
          onChanged();
        } else {
          fsTokenBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getFsTokenFieldBuilder().getBuilder();
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
        if (fsTokenBuilder_ != null) {
          return fsTokenBuilder_.getMessageOrBuilder();
        } else {
          return fsToken_;
        }
      }
      /**
       * required .hbase.pb.DelegationToken fs_token = 3;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> 
          getFsTokenFieldBuilder() {
        if (fsTokenBuilder_ == null) {
          fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>(
                  fsToken_,
                  getParentForChildren(),
                  isClean());
          fsToken_ = null;
        }
        return fsTokenBuilder_;
      }

      // required string bulk_token = 4;
      private java.lang.Object bulkToken_ = "";
      /**
       * required string bulk_token = 4;
       */
      public boolean hasBulkToken() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * required string bulk_token = 4;
       */
      public java.lang.String getBulkToken() {
        java.lang.Object ref = bulkToken_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          bulkToken_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * required string bulk_token = 4;
       */
      public com.google.protobuf.ByteString
          getBulkTokenBytes() {
        java.lang.Object ref = bulkToken_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          bulkToken_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * required string bulk_token = 4;
       */
      public Builder setBulkToken(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        bulkToken_ = value;
        onChanged();
        return this;
      }
      /**
       * required string bulk_token = 4;
       */
      public Builder clearBulkToken() {
        bitField0_ = (bitField0_ & ~0x00000008);
        bulkToken_ = getDefaultInstance().getBulkToken();
        onChanged();
        return this;
      }
      /**
       * required string bulk_token = 4;
       */
      public Builder setBulkTokenBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        bulkToken_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.SecureBulkLoadHFilesRequest)
    }

    static {
      defaultInstance = new SecureBulkLoadHFilesRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadHFilesRequest)
  }
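  // Minimal round-trip sketch (illustrative, not generated code): the static parse helpers
  // above can rebuild a request from its serialized bytes, e.g.
  //   byte[] bytes = request.toByteArray();   // toByteArray() comes from the GeneratedMessage base class
  //   SecureBulkLoadHFilesRequest copy = SecureBulkLoadHFilesRequest.parseFrom(bytes);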

  public interface SecureBulkLoadHFilesResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool loaded = 1;
    /**
     * required bool loaded = 1;
     */
    boolean hasLoaded();
    /**
     * required bool loaded = 1;
     */
    boolean getLoaded();
  }
  /**
   * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesResponse}
   */
  public static final class SecureBulkLoadHFilesResponse extends
      com.google.protobuf.GeneratedMessage
      implements SecureBulkLoadHFilesResponseOrBuilder {
    // Use SecureBulkLoadHFilesResponse.newBuilder() to construct.
    private SecureBulkLoadHFilesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private SecureBulkLoadHFilesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final SecureBulkLoadHFilesResponse defaultInstance;
    public static SecureBulkLoadHFilesResponse getDefaultInstance() {
      return defaultInstance;
    }

    public SecureBulkLoadHFilesResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private SecureBulkLoadHFilesResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              loaded_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class);
    }

    public static com.google.protobuf.Parser<SecureBulkLoadHFilesResponse> PARSER =
        new com.google.protobuf.AbstractParser<SecureBulkLoadHFilesResponse>() {
      public SecureBulkLoadHFilesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SecureBulkLoadHFilesResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SecureBulkLoadHFilesResponse> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required bool loaded = 1;
    public static final int LOADED_FIELD_NUMBER = 1;
    private boolean loaded_;
    /**
     * required bool loaded = 1;
     */
    public boolean hasLoaded() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * required bool loaded = 1;
     */
    public boolean getLoaded() {
      return loaded_;
    }

    private void initFields() {
      loaded_ = false;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasLoaded()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, loaded_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, loaded_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) obj;

      boolean result = true;
      result = result && (hasLoaded() == other.hasLoaded());
      if (hasLoaded()) {
        result = result && (getLoaded()
            == other.getLoaded());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLoaded()) {
        hash = (37 * hash) + LOADED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getLoaded());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        loaded_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.loaded_ = loaded_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()) return this;
        if (other.hasLoaded()) {
          setLoaded(other.getLoaded());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasLoaded()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required bool loaded = 1;
      private boolean loaded_ ;
      /**
       * required bool loaded = 1;
       */
      public boolean hasLoaded() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * required bool loaded = 1;
       */
      public boolean getLoaded() {
        return loaded_;
      }
      /**
       * required bool loaded = 1;
       */
      public Builder setLoaded(boolean value) {
        bitField0_ |= 0x00000001;
        loaded_ = value;
        onChanged();
        return this;
      }
      /**
       * required bool loaded = 1;
       */
      public Builder clearLoaded() {
        bitField0_ = (bitField0_ & ~0x00000001);
        loaded_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.SecureBulkLoadHFilesResponse)
    }

    static {
      defaultInstance = new SecureBulkLoadHFilesResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadHFilesResponse)
  }
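
  /*
   * Illustrative usage sketch (hand-written, not part of the protoc output):
   * building, serializing and re-parsing the SecureBulkLoadHFilesResponse
   * message above using only the generated API shown in this file.
   *
   *   SecureBulkLoadProtos.SecureBulkLoadHFilesResponse resp =
   *       SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.newBuilder()
   *           .setLoaded(true)   // 'loaded' is required; build() throws if it is unset
   *           .build();
   *   byte[] bytes = resp.toByteArray();
   *   SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parsed =
   *       SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.parseFrom(bytes);
   *   boolean loaded = parsed.getLoaded();
   */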

  /**
   * Protobuf service {@code hbase.pb.SecureBulkLoadService}
   */
  public static abstract class SecureBulkLoadService
      implements com.google.protobuf.Service {
    protected SecureBulkLoadService() {}

    public interface Interface {
      /**
       * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);
       */
      public abstract void prepareBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done);

      /**
       * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse);
       */
      public abstract void secureBulkLoadHFiles(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse> done);

      /**
       * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);
       */
      public abstract void cleanupBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done);

    }
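
    /*
     * Illustrative sketch (hand-written, not part of the protoc output): a
     * server-side implementation of the Interface above is typically wrapped
     * into a generic com.google.protobuf.Service via newReflectiveService(...)
     * below. 'MyBulkLoadEndpoint' is a hypothetical class assumed to implement
     * all three rpc methods of SecureBulkLoadService.Interface.
     *
     *   SecureBulkLoadService.Interface impl = new MyBulkLoadEndpoint();
     *   com.google.protobuf.Service service =
     *       SecureBulkLoadService.newReflectiveService(impl);
     *   // the RPC framework can now dispatch any of the three rpcs
     *   // generically through service.callMethod(...).
     */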

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new SecureBulkLoadService() {
        @java.lang.Override
        public  void prepareBulkLoad(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done) {
          impl.prepareBulkLoad(controller, request, done);
        }

        @java.lang.Override
        public  void secureBulkLoadHFiles(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse> done) {
          impl.secureBulkLoadHFiles(controller, request, done);
        }

        @java.lang.Override
        public  void cleanupBulkLoad(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done) {
          impl.cleanupBulkLoad(controller, request, done);
        }

      };
    }

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request);
            case 1:
              return impl.secureBulkLoadHFiles(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)request);
            case 2:
              return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);
     */
    public abstract void prepareBulkLoad(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done);

    /**
     * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse);
     */
    public abstract void secureBulkLoadHFiles(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse> done);

    /**
     * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);
     */
    public abstract void cleanupBulkLoad(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse>specializeCallback(
              done));
          return;
        case 1:
          this.secureBulkLoadHFiles(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse>specializeCallback(
              done));
          return;
        case 2:
          this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void prepareBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()));
      }

      public  void secureBulkLoadHFiles(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()));
      }

      public  void cleanupBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request)
          throws com.google.protobuf.ServiceException;
    }
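
    /*
     * Illustrative client-side sketch (hand-written, not part of the protoc
     * output), assuming 'channel' is a com.google.protobuf.BlockingRpcChannel
     * supplied by the surrounding RPC framework (in HBase 1.x this is
     * typically a CoprocessorRpcChannel) and 'request' is an already-built
     * ClientProtos.PrepareBulkLoadRequest:
     *
     *   SecureBulkLoadService.BlockingInterface stub =
     *       SecureBulkLoadService.newBlockingStub(channel);
     *   ClientProtos.PrepareBulkLoadResponse response =
     *       stub.prepareBulkLoad(null, request);  // may throw ServiceException
     *   String bulkToken = response.getBulkToken();
     */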

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadService)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\024SecureBulkLoad.proto\022\010hbase.pb\032\014Client" +
      ".proto\"\266\001\n\033SecureBulkLoadHFilesRequest\022>" +
      "\n\013family_path\030\001 \003(\0132).hbase.pb.BulkLoadH" +
      "FileRequest.FamilyPath\022\026\n\016assign_seq_num" +
      "\030\002 \001(\010\022+\n\010fs_token\030\003 \002(\0132\031.hbase.pb.Dele" +
      "gationToken\022\022\n\nbulk_token\030\004 \002(\t\".\n\034Secur" +
      "eBulkLoadHFilesResponse\022\016\n\006loaded\030\001 \002(\0102" +
      "\256\002\n\025SecureBulkLoadService\022V\n\017PrepareBulk" +
      "Load\022 .hbase.pb.PrepareBulkLoadRequest\032!" +
      ".hbase.pb.PrepareBulkLoadResponse\022e\n\024Sec",
      "ureBulkLoadHFiles\022%.hbase.pb.SecureBulkL" +
      "oadHFilesRequest\032&.hbase.pb.SecureBulkLo" +
      "adHFilesResponse\022V\n\017CleanupBulkLoad\022 .hb" +
      "ase.pb.CleanupBulkLoadRequest\032!.hbase.pb" +
      ".CleanupBulkLoadResponseBJ\n*org.apache.h" +
      "adoop.hbase.protobuf.generatedB\024SecureBu" +
      "lkLoadProtosH\001\210\001\001\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor,
              new java.lang.String[] { "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", });
          internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor,
              new java.lang.String[] { "Loaded", });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}



