/*
 * (Scraper residue from the download site — kept as a comment so the file parses.)
 * Please wait. This can take a few minutes...
 * Many resources are needed to host a project download. Please understand that we have to cover our server costs. Thank you in advance.
 * Project price: only $1.
 * You can buy this project and download/modify it as often as you want.
 * org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos — Maven / Gradle / Ivy
 */
package org.apache.hadoop.security.proto;
public final class RefreshUserMappingsProtocolProtos {
// Private constructor: this class is a non-instantiable holder for the
// generated protobuf message types below.
private RefreshUserMappingsProtocolProtos () {}
/**
 * Registers all extensions declared in this file with the given lite
 * registry. This file declares no extensions, so the method is a no-op;
 * it is kept for API parity with other generated protobuf files.
 */
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
/**
 * Registers all extensions declared in this file with the given full
 * registry. Delegates to the {@code ExtensionRegistryLite} overload
 * (a no-op here, since this file declares no extensions).
 */
public static void registerAllExtensions(
    org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
  // ExtensionRegistry is a subtype of ExtensionRegistryLite, so the
  // widening happens implicitly; no explicit cast needed.
  org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite liteRegistry = registry;
  registerAllExtensions(liteRegistry);
}
/**
 * Accessor interface for {@code hadoop.common.RefreshUserToGroupsMappingsRequestProto}.
 * The message declares no fields, so no accessors are added beyond those
 * inherited from {@code MessageOrBuilder}.
 */
public interface RefreshUserToGroupsMappingsRequestProtoOrBuilder extends
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
public static final class RefreshUserToGroupsMappingsRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
RefreshUserToGroupsMappingsRequestProtoOrBuilder {
private static final long serialVersionUID = 0 L;
private RefreshUserToGroupsMappingsRequestProto (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .Builder builder) {
super (builder);
}
private RefreshUserToGroupsMappingsRequestProto () {
}
@java .lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this .unknownFields;
}
private RefreshUserToGroupsMappingsRequestProto (
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this ();
if (extensionRegistry == null ) {
throw new java.lang.NullPointerException ();
}
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .Builder unknownFields =
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .newBuilder();
try {
boolean done = false ;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0 :
done = true ;
break ;
default : {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true ;
}
break ;
}
}
}
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this );
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException (
e).setUnfinishedMessage(this );
} finally {
this .unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_descriptor;
}
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .Builder .class );
}
private byte memoizedIsInitialized = -1 ;
@java .lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1 ) return true ;
if (isInitialized == 0 ) return false ;
memoizedIsInitialized = 1 ;
return true ;
}
@java .lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java .lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1 ) return size;
size = 0 ;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java .lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this ) {
return true ;
}
if (!(obj instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto )) {
return super .equals(obj);
}
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto other = (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto ) obj;
if (!unknownFields.equals(other.unknownFields)) return false ;
return true ;
}
@java .lang.Override
public int hashCode() {
if (memoizedHashCode != 0 ) {
return memoizedHashCode;
}
int hash = 41 ;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
@java .lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE .toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto prototype) {
return DEFAULT_INSTANCE .toBuilder().mergeFrom(prototype);
}
@java .lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder () : new Builder ().mergeFrom(this );
}
@java .lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
Builder builder = new Builder (parent);
return builder;
}
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .Builder implements
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_descriptor;
}
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .Builder .class );
}
private Builder () {
maybeForceBuilderInitialization();
}
private Builder (
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
super (parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java .lang.Override
public Builder clear() {
super .clear();
return this ;
}
@java .lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptorForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_descriptor;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .getDefaultInstance();
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto build() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto buildPartial() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto result = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto (this );
onBuilt();
return result;
}
@java .lang.Override
public Builder clone() {
return super .clone();
}
@java .lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .setField(field, value);
}
@java .lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field) {
return super .clearField(field);
}
@java .lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors .OneofDescriptor oneof) {
return super .clearOneof(oneof);
}
@java .lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
int index, java.lang.Object value) {
return super .setRepeatedField(field, index, value);
}
@java .lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .addRepeatedField(field, value);
}
@java .lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto ) {
return mergeFrom((org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto )other);
} else {
super .mergeFrom(other);
return this ;
}
}
public Builder mergeFrom(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto other) {
if (other == org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .getDefaultInstance()) return this ;
this .mergeUnknownFields(other.unknownFields);
onChanged();
return this ;
}
@java .lang.Override
public final boolean isInitialized() {
return true ;
}
@java .lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto parsedMessage = null ;
try {
parsedMessage = PARSER .parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto ) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null ) {
mergeFrom(parsedMessage);
}
}
return this ;
}
@java .lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .setUnknownFields(unknownFields);
}
@java .lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .mergeUnknownFields(unknownFields);
}
}
private static final org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto DEFAULT_INSTANCE ;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto ();
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE ;
}
@java .lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser () {
@java .lang.Override
public RefreshUserToGroupsMappingsRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new RefreshUserToGroupsMappingsRequestProto (input, extensionRegistry);
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER ;
}
@java .lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER ;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE ;
}
}
/**
 * Accessor interface for {@code hadoop.common.RefreshUserToGroupsMappingsResponseProto}.
 * The message declares no fields, so no accessors are added beyond those
 * inherited from {@code MessageOrBuilder}.
 */
public interface RefreshUserToGroupsMappingsResponseProtoOrBuilder extends
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
public static final class RefreshUserToGroupsMappingsResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
RefreshUserToGroupsMappingsResponseProtoOrBuilder {
private static final long serialVersionUID = 0 L;
private RefreshUserToGroupsMappingsResponseProto (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .Builder builder) {
super (builder);
}
private RefreshUserToGroupsMappingsResponseProto () {
}
@java .lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this .unknownFields;
}
private RefreshUserToGroupsMappingsResponseProto (
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this ();
if (extensionRegistry == null ) {
throw new java.lang.NullPointerException ();
}
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .Builder unknownFields =
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .newBuilder();
try {
boolean done = false ;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0 :
done = true ;
break ;
default : {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true ;
}
break ;
}
}
}
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this );
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException (
e).setUnfinishedMessage(this );
} finally {
this .unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_descriptor;
}
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .Builder .class );
}
private byte memoizedIsInitialized = -1 ;
@java .lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1 ) return true ;
if (isInitialized == 0 ) return false ;
memoizedIsInitialized = 1 ;
return true ;
}
@java .lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java .lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1 ) return size;
size = 0 ;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java .lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this ) {
return true ;
}
if (!(obj instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto )) {
return super .equals(obj);
}
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto other = (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto ) obj;
if (!unknownFields.equals(other.unknownFields)) return false ;
return true ;
}
@java .lang.Override
public int hashCode() {
if (memoizedHashCode != 0 ) {
return memoizedHashCode;
}
int hash = 41 ;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
@java .lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE .toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto prototype) {
return DEFAULT_INSTANCE .toBuilder().mergeFrom(prototype);
}
@java .lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder () : new Builder ().mergeFrom(this );
}
@java .lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
Builder builder = new Builder (parent);
return builder;
}
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .Builder implements
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_descriptor;
}
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .Builder .class );
}
private Builder () {
maybeForceBuilderInitialization();
}
private Builder (
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
super (parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java .lang.Override
public Builder clear() {
super .clear();
return this ;
}
@java .lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptorForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_descriptor;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .getDefaultInstance();
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto build() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto buildPartial() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto result = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto (this );
onBuilt();
return result;
}
@java .lang.Override
public Builder clone() {
return super .clone();
}
@java .lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .setField(field, value);
}
@java .lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field) {
return super .clearField(field);
}
@java .lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors .OneofDescriptor oneof) {
return super .clearOneof(oneof);
}
@java .lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
int index, java.lang.Object value) {
return super .setRepeatedField(field, index, value);
}
@java .lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .addRepeatedField(field, value);
}
@java .lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto ) {
return mergeFrom((org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto )other);
} else {
super .mergeFrom(other);
return this ;
}
}
public Builder mergeFrom(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto other) {
if (other == org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .getDefaultInstance()) return this ;
this .mergeUnknownFields(other.unknownFields);
onChanged();
return this ;
}
@java .lang.Override
public final boolean isInitialized() {
return true ;
}
@java .lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto parsedMessage = null ;
try {
parsedMessage = PARSER .parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto ) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null ) {
mergeFrom(parsedMessage);
}
}
return this ;
}
@java .lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .setUnknownFields(unknownFields);
}
@java .lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .mergeUnknownFields(unknownFields);
}
}
// Shared immutable default instance, created eagerly at class load.
private static final org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsResponseProto DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsResponseProto();
}

/** Returns the shared immutable default instance of this message. */
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsResponseProto getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

/**
 * Deprecated direct parser handle; prefer {@link #parser()}.
 * NOTE(review): the generic type arguments on {@code Parser}/{@code
 * AbstractParser} had been stripped in this copy of the generated source
 * (raw types); they are restored here so callers get a typed parser.
 */
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsResponseProto>
    PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RefreshUserToGroupsMappingsResponseProto>() {
  @java.lang.Override
  public RefreshUserToGroupsMappingsResponseProto parsePartialFrom(
      org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    // Delegate to the wire-format constructor of the message.
    return new RefreshUserToGroupsMappingsResponseProto(input, extensionRegistry);
  }
};

/** Returns the parser for this message type. */
public static org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsResponseProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<RefreshUserToGroupsMappingsResponseProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsResponseProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/**
 * Builder-view interface for {@code RefreshSuperUserGroupsConfigurationRequestProto}.
 * The message declares no fields, so no accessors are generated; this is a
 * pure marker extending the generic {@code MessageOrBuilder} contract.
 */
public interface RefreshSuperUserGroupsConfigurationRequestProtoOrBuilder extends
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
public static final class RefreshSuperUserGroupsConfigurationRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
RefreshSuperUserGroupsConfigurationRequestProtoOrBuilder {
private static final long serialVersionUID = 0 L;
private RefreshSuperUserGroupsConfigurationRequestProto (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .Builder builder) {
super (builder);
}
private RefreshSuperUserGroupsConfigurationRequestProto () {
}
@java .lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
getUnknownFields() {
return this .unknownFields;
}
private RefreshSuperUserGroupsConfigurationRequestProto (
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this ();
if (extensionRegistry == null ) {
throw new java.lang.NullPointerException ();
}
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .Builder unknownFields =
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .newBuilder();
try {
boolean done = false ;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0 :
done = true ;
break ;
default : {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true ;
}
break ;
}
}
}
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this );
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException (
e).setUnfinishedMessage(this );
} finally {
this .unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_descriptor;
}
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .Builder .class );
}
private byte memoizedIsInitialized = -1 ;
@java .lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1 ) return true ;
if (isInitialized == 0 ) return false ;
memoizedIsInitialized = 1 ;
return true ;
}
@java .lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java .lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1 ) return size;
size = 0 ;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java .lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this ) {
return true ;
}
if (!(obj instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto )) {
return super .equals(obj);
}
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto other = (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto ) obj;
if (!unknownFields.equals(other.unknownFields)) return false ;
return true ;
}
@java .lang.Override
public int hashCode() {
if (memoizedHashCode != 0 ) {
return memoizedHashCode;
}
int hash = 41 ;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
@java .lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE .toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto prototype) {
return DEFAULT_INSTANCE .toBuilder().mergeFrom(prototype);
}
@java .lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder () : new Builder ().mergeFrom(this );
}
@java .lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
Builder builder = new Builder (parent);
return builder;
}
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .Builder implements
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_descriptor;
}
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .Builder .class );
}
private Builder () {
maybeForceBuilderInitialization();
}
private Builder (
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
super (parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java .lang.Override
public Builder clear() {
super .clear();
return this ;
}
@java .lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptorForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_descriptor;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .getDefaultInstance();
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto build() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto buildPartial() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto result = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto (this );
onBuilt();
return result;
}
@java .lang.Override
public Builder clone() {
return super .clone();
}
@java .lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .setField(field, value);
}
@java .lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field) {
return super .clearField(field);
}
@java .lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors .OneofDescriptor oneof) {
return super .clearOneof(oneof);
}
@java .lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
int index, java.lang.Object value) {
return super .setRepeatedField(field, index, value);
}
@java .lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .addRepeatedField(field, value);
}
@java .lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto ) {
return mergeFrom((org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto )other);
} else {
super .mergeFrom(other);
return this ;
}
}
public Builder mergeFrom(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto other) {
if (other == org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .getDefaultInstance()) return this ;
this .mergeUnknownFields(other.unknownFields);
onChanged();
return this ;
}
@java .lang.Override
public final boolean isInitialized() {
return true ;
}
@java .lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto parsedMessage = null ;
try {
parsedMessage = PARSER .parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto ) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null ) {
mergeFrom(parsedMessage);
}
}
return this ;
}
@java .lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .setUnknownFields(unknownFields);
}
@java .lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .mergeUnknownFields(unknownFields);
}
}
private static final org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto DEFAULT_INSTANCE ;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto ();
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE ;
}
@java .lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser () {
@java .lang.Override
public RefreshSuperUserGroupsConfigurationRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new RefreshSuperUserGroupsConfigurationRequestProto (input, extensionRegistry);
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER ;
}
@java .lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER ;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE ;
}
}
/**
 * Builder-view interface for {@code RefreshSuperUserGroupsConfigurationResponseProto}.
 * The message declares no fields, so no accessors are generated; this is a
 * pure marker extending the generic {@code MessageOrBuilder} contract.
 */
public interface RefreshSuperUserGroupsConfigurationResponseProtoOrBuilder extends
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
}
public static final class RefreshSuperUserGroupsConfigurationResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
RefreshSuperUserGroupsConfigurationResponseProtoOrBuilder {
// Serialization version pin for this generated message. NOTE(review):
// restored "0L" — this copy of the source had it mangled to "0 L",
// which is a Java syntax error.
private static final long serialVersionUID = 0L;

// Builder-based construction path used by Builder.buildPartial().
// NOTE(review): restored the stripped wildcard on Builder<?>.
private RefreshSuperUserGroupsConfigurationResponseProto(
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default construction; the message has no fields to initialize.
private RefreshSuperUserGroupsConfigurationResponseProto() {
}

@java.lang.Override
public final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format constructor: reads tags until end-of-message (tag 0),
// preserving any unrecognized fields for forward compatibility. The
// finally block attaches whatever unknown fields were read even when
// parsing fails partway through.
private RefreshSuperUserGroupsConfigurationResponseProto (
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
this ();
if (extensionRegistry == null ) {
throw new java.lang.NullPointerException ();
}
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .Builder unknownFields =
org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet .newBuilder();
try {
boolean done = false ;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0 :
// Tag 0 marks end of the message on the wire.
done = true ;
break ;
default : {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true ;
}
break ;
}
}
}
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this );
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protobuf parse exception.
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException (
e).setUnfinishedMessage(this );
} finally {
this .unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor accessor for this message type (shared, file-level metadata).
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_descriptor;
}
// Binds the descriptor to this class/Builder pair for reflective access.
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .Builder .class );
}
// Memoized tri-state initialization flag: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1 ;
@java .lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1 ) return true ;
if (isInitialized == 0 ) return false ;
// No required fields: the message is always initialized; cache it.
memoizedIsInitialized = 1 ;
return true ;
}
// Serialization: with no declared fields, only unknown fields are written.
@java .lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
// Size is memoized in GeneratedMessageV3.memoizedSize (-1 = not computed).
@java .lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1 ) return size;
size = 0 ;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Equality for a field-less message: two instances are equal exactly when
 * they are the same protobuf type and their unknown-field sets match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (this == obj) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationResponseProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationResponseProto that =
      (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationResponseProto) obj;
  return unknownFields.equals(that.unknownFields);
}

/** Hash derives from the descriptor and the unknown fields; memoized. */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptor().hashCode();
  h = (29 * h) + unknownFields.hashCode();
  memoizedHashCode = h;
  return h;
}
// ---- Static parse helpers ----
// Thin wrappers over PARSER for every supported input source (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream), with and without an
// extension registry. Stream variants route through GeneratedMessageV3
// helpers so protobuf parse errors surface as IOException to the caller.
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER .parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
// Delimited variants read a length prefix before the message payload.
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER , input, extensionRegistry);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input);
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER , input, extensionRegistry);
}
@java .lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Fresh builder seeded from the shared default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE .toBuilder();
}
// Builder pre-populated with the given prototype's (unknown-field) state.
public static Builder newBuilder(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto prototype) {
return DEFAULT_INSTANCE .toBuilder().mergeFrom(prototype);
}
@java .lang.Override
public Builder toBuilder() {
// Skip the merge when converting the default instance (nothing to copy).
return this == DEFAULT_INSTANCE
? new Builder () : new Builder ().mergeFrom(this );
}
@java .lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
Builder builder = new Builder (parent);
return builder;
}
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .Builder implements
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_descriptor;
}
@java .lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .class , org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .Builder .class );
}
private Builder () {
maybeForceBuilderInitialization();
}
private Builder (
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .BuilderParent parent) {
super (parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java .lang.Override
public Builder clear() {
super .clear();
return this ;
}
@java .lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
getDescriptorForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_descriptor;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .getDefaultInstance();
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto build() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java .lang.Override
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto buildPartial() {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto result = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto (this );
onBuilt();
return result;
}
@java .lang.Override
public Builder clone() {
return super .clone();
}
@java .lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .setField(field, value);
}
@java .lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field) {
return super .clearField(field);
}
@java .lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors .OneofDescriptor oneof) {
return super .clearOneof(oneof);
}
@java .lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
int index, java.lang.Object value) {
return super .setRepeatedField(field, index, value);
}
@java .lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FieldDescriptor field,
java.lang.Object value) {
return super .addRepeatedField(field, value);
}
@java .lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto ) {
return mergeFrom((org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto )other);
} else {
super .mergeFrom(other);
return this ;
}
}
// Typed merge: this message has no declared fields, so merging the default
// instance is a no-op; otherwise only the unknown-field set is carried over.
public Builder mergeFrom(org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto other) {
if (other == org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .getDefaultInstance()) return this ;
this .mergeUnknownFields(other.unknownFields);
onChanged();
return this ;
}
@java .lang.Override
// No required fields exist on this message type, so it is always initialized.
public final boolean isInitialized() {
return true ;
}
@java .lang.Override
// Parse a message from the wire and merge it into this builder. On a parse
// failure the partially-decoded message (recovered from the exception) is still
// merged in the finally block before the error is rethrown as an IOException —
// this preserves whatever fields were successfully read.
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto parsedMessage = null ;
try {
parsedMessage = PARSER .parsePartialFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto ) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null ) {
mergeFrom(parsedMessage);
}
}
return this ;
}
@java .lang.Override
// Replaces the unknown-field set wholesale; delegates to the base builder.
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .setUnknownFields(unknownFields);
}
@java .lang.Override
// Merges the given unknown fields into the existing set; delegates to the base builder.
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super .mergeUnknownFields(unknownFields);
}
}
// Singleton default (all-fields-unset) instance, created eagerly in a static
// initializer and shared by getDefaultInstance()/getDefaultInstanceForType().
private static final org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto DEFAULT_INSTANCE ;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto ();
}
public static org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE ;
}
// Wire-format parser. Deprecated in favor of the parser()/getParserForType()
// accessors, but kept public for legacy callers.
// NOTE(review): the generic type arguments (e.g. Parser<...> / AbstractParser<...>)
// appear to have been stripped from this copy of the file — verify against the
// original protoc output before compiling.
@java .lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser () {
@java .lang.Override
public RefreshSuperUserGroupsConfigurationResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return new RefreshSuperUserGroupsConfigurationResponseProto (input, extensionRegistry);
}
};
// Preferred static accessor for the shared PARSER instance.
public static org.apache.hadoop.thirdparty.protobuf.Parser parser() {
return PARSER ;
}
@java .lang.Override
// Instance-level accessor for the shared PARSER (Message interface contract).
public org.apache.hadoop.thirdparty.protobuf.Parser getParserForType() {
return PARSER ;
}
@java .lang.Override
// Instance-level accessor for the singleton default instance.
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE ;
}
}
/**
 * Generated RPC service for {@code hadoop.common.RefreshUserMappingsProtocolService}.
 *
 * Exposes two methods — refreshUserToGroupsMappings and
 * refreshSuperUserGroupsConfiguration — in the standard protoc service shapes:
 * an async {@code Interface} + reflective adapter, a {@code BlockingInterface}
 * + reflective adapter, and client-side {@code Stub}/{@code BlockingStub}
 * classes that issue calls over an RpcChannel/BlockingRpcChannel.
 * Method dispatch throughout is by descriptor index: 0 = refreshUserToGroupsMappings,
 * 1 = refreshSuperUserGroupsConfiguration.
 * NOTE(review): RpcCallback appears without its generic type argument in this
 * copy — presumably lost in extraction; verify against the original protoc output.
 */
public static abstract class RefreshUserMappingsProtocolService
implements org.apache.hadoop.thirdparty.protobuf.Service {
protected RefreshUserMappingsProtocolService () {}
// Async callback-style view of the service, for implementors that do not
// want to extend the abstract class.
public interface Interface {
public abstract void refreshUserToGroupsMappings(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
public abstract void refreshSuperUserGroupsConfiguration(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
}
// Wraps an Interface implementation as a full Service by delegating each method.
public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
final Interface impl) {
return new RefreshUserMappingsProtocolService () {
@java .lang.Override
public void refreshUserToGroupsMappings(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.refreshUserToGroupsMappings(controller, request, done);
}
@java .lang.Override
public void refreshSuperUserGroupsConfiguration(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.refreshSuperUserGroupsConfiguration(controller, request, done);
}
};
}
// Wraps a BlockingInterface implementation as a BlockingService; callBlockingMethod
// dispatches by method index after validating the descriptor belongs to this service.
public static org.apache.hadoop.thirdparty.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new org.apache.hadoop.thirdparty.protobuf.BlockingService () {
public final org.apache.hadoop.thirdparty.protobuf.Descriptors .ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
org.apache.hadoop.thirdparty.protobuf.Descriptors .MethodDescriptor method,
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.thirdparty.protobuf.Message request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException (
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type." );
}
switch(method.getIndex()) {
case 0 :
return impl.refreshUserToGroupsMappings(controller, (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto )request);
case 1 :
return impl.refreshSuperUserGroupsConfiguration(controller, (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto )request);
default :
throw new java.lang.AssertionError ("Can't get here." );
}
}
// Returns the default instance of the request type for the given method,
// used by RPC layers to know what to parse incoming bytes into.
public final org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors .MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException (
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type." );
}
switch(method.getIndex()) {
case 0 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .getDefaultInstance();
case 1 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .getDefaultInstance();
default :
throw new java.lang.AssertionError ("Can't get here." );
}
}
// Same as getRequestPrototype, but for the response types.
public final org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors .MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException (
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type." );
}
switch(method.getIndex()) {
case 0 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .getDefaultInstance();
case 1 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .getDefaultInstance();
default :
throw new java.lang.AssertionError ("Can't get here." );
}
}
};
}
// Server-side abstract methods: subclasses implement the actual refresh logic.
public abstract void refreshUserToGroupsMappings(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
public abstract void refreshSuperUserGroupsConfiguration(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
// This service is the first (index 0) service declared in the .proto file.
public static final
org.apache.hadoop.thirdparty.protobuf.Descriptors .ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .getDescriptor().getServices().get(0 );
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors .ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
// Generic async dispatch: routes a reflective call to the matching abstract
// method, specializing the Message callback to the concrete response type.
public final void callMethod(
org.apache.hadoop.thirdparty.protobuf.Descriptors .MethodDescriptor method,
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.thirdparty.protobuf.Message request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback <
org.apache.hadoop.thirdparty.protobuf.Message > done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException (
"Service.callMethod() given method descriptor for wrong " +
"service type." );
}
switch(method.getIndex()) {
case 0 :
this .refreshUserToGroupsMappings(controller, (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto )request,
org.apache.hadoop.thirdparty.protobuf.RpcUtil .specializeCallback(
done));
return ;
case 1 :
this .refreshSuperUserGroupsConfiguration(controller, (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto )request,
org.apache.hadoop.thirdparty.protobuf.RpcUtil .specializeCallback(
done));
return ;
default :
throw new java.lang.AssertionError ("Can't get here." );
}
}
// Request prototypes for the async Service view (mirrors the blocking adapter above).
public final org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors .MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException (
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type." );
}
switch(method.getIndex()) {
case 0 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto .getDefaultInstance();
case 1 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto .getDefaultInstance();
default :
throw new java.lang.AssertionError ("Can't get here." );
}
}
// Response prototypes for the async Service view.
public final org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors .MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException (
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type." );
}
switch(method.getIndex()) {
case 0 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .getDefaultInstance();
case 1 :
return org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .getDefaultInstance();
default :
throw new java.lang.AssertionError ("Can't get here." );
}
}
// Client-side async stub: each method issues channel.callMethod with the
// method descriptor (by index), the response prototype, and a callback
// generalized back to the Message type.
public static Stub newStub(
org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
return new Stub (channel);
}
public static final class Stub extends org .apache .hadoop .security .proto .RefreshUserMappingsProtocolProtos .RefreshUserMappingsProtocolService implements Interface {
private Stub (org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
this .channel = channel;
}
private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;
public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
return channel;
}
public void refreshUserToGroupsMappings(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(0 ),
controller,
request,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .getDefaultInstance(),
org.apache.hadoop.thirdparty.protobuf.RpcUtil .generalizeCallback(
done,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .class ,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .getDefaultInstance()));
}
public void refreshSuperUserGroupsConfiguration(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(1 ),
controller,
request,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .getDefaultInstance(),
org.apache.hadoop.thirdparty.protobuf.RpcUtil .generalizeCallback(
done,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .class ,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .getDefaultInstance()));
}
}
// Client-side blocking stub: each method calls channel.callBlockingMethod and
// casts the returned Message to the concrete response type.
public static BlockingInterface newBlockingStub(
org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
return new BlockingStub (channel);
}
public interface BlockingInterface {
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto refreshUserToGroupsMappings(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException ;
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto refreshSuperUserGroupsConfiguration(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException ;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub (org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
this .channel = channel;
}
private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto refreshUserToGroupsMappings(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsRequestProto request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto ) channel.callBlockingMethod(
getDescriptor().getMethods().get(0 ),
controller,
request,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshUserToGroupsMappingsResponseProto .getDefaultInstance());
}
public org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto refreshSuperUserGroupsConfiguration(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationRequestProto request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto ) channel.callBlockingMethod(
getDescriptor().getMethods().get(1 ),
controller,
request,
org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos .RefreshSuperUserGroupsConfigurationResponseProto .getDefaultInstance());
}
}
}
// Per-message descriptor and field-accessor-table handles, one pair for each of
// the four message types in the .proto file. All are assigned once in the
// static initializer below, after the file descriptor is built.
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors .Descriptor
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_fieldAccessorTable;
// File-level descriptor accessor; the backing field is populated by the
// InternalDescriptorAssigner in the static initializer below.
public static org.apache.hadoop.thirdparty.protobuf.Descriptors .FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.thirdparty.protobuf.Descriptors .FileDescriptor
descriptor;
static {
// descriptorData is the serialized FileDescriptorProto for
// RefreshUserMappingsProtocol.proto (package hadoop.common), embedded as an
// escaped string. It MUST NOT be edited by hand — it is decoded byte-for-byte
// to rebuild the descriptor at class-load time.
java.lang.String [] descriptorData = {
"\n!RefreshUserMappingsProtocol.proto\022\rhad" +
"oop.common\")\n\'RefreshUserToGroupsMapping" +
"sRequestProto\"*\n(RefreshUserToGroupsMapp" +
"ingsResponseProto\"1\n/RefreshSuperUserGro" +
"upsConfigurationRequestProto\"2\n0RefreshS" +
"uperUserGroupsConfigurationResponseProto" +
"2\336\002\n\"RefreshUserMappingsProtocolService\022" +
"\216\001\n\033refreshUserToGroupsMappings\0226.hadoop" +
".common.RefreshUserToGroupsMappingsReque" +
"stProto\0327.hadoop.common.RefreshUserToGro" +
"upsMappingsResponseProto\022\246\001\n#refreshSupe" +
"rUserGroupsConfiguration\022>.hadoop.common" +
".RefreshSuperUserGroupsConfigurationRequ" +
"estProto\032?.hadoop.common.RefreshSuperUse" +
"rGroupsConfigurationResponseProtoBK\n org" +
".apache.hadoop.security.protoB!RefreshUs" +
"erMappingsProtocolProtos\210\001\001\240\001\001"
};
// The assigner captures the built FileDescriptor into the static `descriptor`
// field; returning null means no extensions are registered.
org.apache.hadoop.thirdparty.protobuf.Descriptors .FileDescriptor .InternalDescriptorAssigner assigner =
new org.apache.hadoop.thirdparty.protobuf.Descriptors .FileDescriptor . InternalDescriptorAssigner () {
public org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
org.apache.hadoop.thirdparty.protobuf.Descriptors .FileDescriptor root) {
descriptor = root;
return null ;
}
};
// This .proto imports no other files, hence the empty dependency array.
org.apache.hadoop.thirdparty.protobuf.Descriptors .FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.thirdparty.protobuf.Descriptors .FileDescriptor [] {
}, assigner);
// Bind each message's descriptor (by declaration order in the .proto) and
// build its field accessor table; all four messages are field-less, so the
// field-name arrays are empty.
internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_descriptor =
getDescriptor().getMessageTypes().get(0 );
internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable (
internal_static_hadoop_common_RefreshUserToGroupsMappingsRequestProto_descriptor,
new java.lang.String [] { });
internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_descriptor =
getDescriptor().getMessageTypes().get(1 );
internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable (
internal_static_hadoop_common_RefreshUserToGroupsMappingsResponseProto_descriptor,
new java.lang.String [] { });
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_descriptor =
getDescriptor().getMessageTypes().get(2 );
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable (
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationRequestProto_descriptor,
new java.lang.String [] { });
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_descriptor =
getDescriptor().getMessageTypes().get(3 );
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 .FieldAccessorTable (
internal_static_hadoop_common_RefreshSuperUserGroupsConfigurationResponseProto_descriptor,
new java.lang.String [] { });
}
}