// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: GetUserMappingsProtocol.proto
package org.apache.hadoop.tools.proto;
public final class GetUserMappingsProtocolProtos {
private GetUserMappingsProtocolProtos() {}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
public interface GetGroupsForUserRequestProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.GetGroupsForUserRequestProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* required string user = 1;
* @return Whether the user field is set.
*/
boolean hasUser();
/**
* required string user = 1;
* @return The user.
*/
java.lang.String getUser();
/**
* required string user = 1;
* @return The bytes for user.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes();
}
/**
*
**
* Get groups for user request.
*
*
* Protobuf type {@code hadoop.common.GetGroupsForUserRequestProto}
*/
public static final class GetGroupsForUserRequestProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.GetGroupsForUserRequestProto)
GetGroupsForUserRequestProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetGroupsForUserRequestProto.newBuilder() to construct.
private GetGroupsForUserRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetGroupsForUserRequestProto() {
user_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetGroupsForUserRequestProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.class, org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.Builder.class);
}
private int bitField0_;
public static final int USER_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object user_ = "";
/**
* required string user = 1;
* @return Whether the user field is set.
*/
@java.lang.Override
public boolean hasUser() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string user = 1;
* @return The user.
*/
@java.lang.Override
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
user_ = s;
}
return s;
}
}
/**
* required string user = 1;
* @return The bytes for user.
*/
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
user_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasUser()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, user_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, user_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto other = (org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto) obj;
if (hasUser() != other.hasUser()) return false;
if (hasUser()) {
if (!getUser()
.equals(other.getUser())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUser()) {
hash = (37 * hash) + USER_FIELD_NUMBER;
hash = (53 * hash) + getUser().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
**
* Get groups for user request.
*
*
* Protobuf type {@code hadoop.common.GetGroupsForUserRequestProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.GetGroupsForUserRequestProto)
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserRequestProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.class, org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.Builder.class);
}
// Construct using org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
user_ = "";
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserRequestProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto build() {
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto buildPartial() {
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto result = new org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.user_ = user_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto) {
return mergeFrom((org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto other) {
if (other == org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.getDefaultInstance()) return this;
if (other.hasUser()) {
user_ = other.user_;
bitField0_ |= 0x00000001;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
if (!hasUser()) {
return false;
}
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
user_ = input.readBytes();
bitField0_ |= 0x00000001;
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object user_ = "";
/**
* required string user = 1;
* @return Whether the user field is set.
*/
public boolean hasUser() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* required string user = 1;
* @return The user.
*/
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (!(ref instanceof java.lang.String)) {
org.apache.hadoop.thirdparty.protobuf.ByteString bs =
(org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
user_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* required string user = 1;
* @return The bytes for user.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
if (ref instanceof String) {
org.apache.hadoop.thirdparty.protobuf.ByteString b =
org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
user_ = b;
return b;
} else {
return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
}
}
/**
* required string user = 1;
* @param value The user to set.
* @return This builder for chaining.
*/
public Builder setUser(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
user_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* required string user = 1;
* @return This builder for chaining.
*/
public Builder clearUser() {
user_ = getDefaultInstance().getUser();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* required string user = 1;
* @param value The bytes for user to set.
* @return This builder for chaining.
*/
public Builder setUserBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
user_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.GetGroupsForUserRequestProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.GetGroupsForUserRequestProto)
private static final org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto();
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserRequestProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetGroupsForUserRequestProto>() {
@java.lang.Override
public GetGroupsForUserRequestProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserRequestProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserRequestProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
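// Hedged usage sketch, not part of the generated code: shows how a caller might
// build a GetGroupsForUserRequestProto and round-trip it through the protobuf
// wire format. The method name "exampleBuildRequest" is an assumption for
// illustration only; everything it calls is generated above.
private static GetGroupsForUserRequestProto exampleBuildRequest(java.lang.String user)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
GetGroupsForUserRequestProto request = GetGroupsForUserRequestProto.newBuilder()
.setUser(user) // required field; build() fails if it is never set
.build();
byte[] wire = request.toByteArray(); // serialize to the protobuf wire format
return GetGroupsForUserRequestProto.parseFrom(wire); // parse it back via PARSER
}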
public interface GetGroupsForUserResponseProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:hadoop.common.GetGroupsForUserResponseProto)
org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
/**
* repeated string groups = 1;
* @return A list containing the groups.
*/
java.util.List<java.lang.String>
getGroupsList();
/**
* repeated string groups = 1;
* @return The count of groups.
*/
int getGroupsCount();
/**
* repeated string groups = 1;
* @param index The index of the element to return.
* @return The groups at the given index.
*/
java.lang.String getGroups(int index);
/**
* repeated string groups = 1;
* @param index The index of the value to return.
* @return The bytes of the groups at the given index.
*/
org.apache.hadoop.thirdparty.protobuf.ByteString
getGroupsBytes(int index);
}
/**
*
**
* Response for get groups.
*
*
* Protobuf type {@code hadoop.common.GetGroupsForUserResponseProto}
*/
public static final class GetGroupsForUserResponseProto extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:hadoop.common.GetGroupsForUserResponseProto)
GetGroupsForUserResponseProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetGroupsForUserResponseProto.newBuilder() to construct.
private GetGroupsForUserResponseProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetGroupsForUserResponseProto() {
groups_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new GetGroupsForUserResponseProto();
}
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.class, org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.Builder.class);
}
public static final int GROUPS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList groups_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
/**
* repeated string groups = 1;
* @return A list containing the groups.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getGroupsList() {
return groups_;
}
/**
* repeated string groups = 1;
* @return The count of groups.
*/
public int getGroupsCount() {
return groups_.size();
}
/**
* repeated string groups = 1;
* @param index The index of the element to return.
* @return The groups at the given index.
*/
public java.lang.String getGroups(int index) {
return groups_.get(index);
}
/**
* repeated string groups = 1;
* @param index The index of the value to return.
* @return The bytes of the groups at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getGroupsBytes(int index) {
return groups_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < groups_.size(); i++) {
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, groups_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < groups_.size(); i++) {
dataSize += computeStringSizeNoTag(groups_.getRaw(i));
}
size += dataSize;
size += 1 * getGroupsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto other = (org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto) obj;
if (!getGroupsList()
.equals(other.getGroupsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getGroupsCount() > 0) {
hash = (37 * hash) + GROUPS_FIELD_NUMBER;
hash = (53 * hash) + getGroupsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
java.nio.ByteBuffer data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
java.nio.ByteBuffer data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.ByteString data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(byte[] data)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
byte[] data,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseDelimitedFrom(
java.io.InputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto parseFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
**
* Response for get groups.
*
*
* Protobuf type {@code hadoop.common.GetGroupsForUserResponseProto}
*/
public static final class Builder extends
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:hadoop.common.GetGroupsForUserResponseProto)
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProtoOrBuilder {
public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserResponseProto_descriptor;
}
@java.lang.Override
protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.class, org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.Builder.class);
}
// Construct using org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.newBuilder()
private Builder() {
}
private Builder(
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
groups_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.internal_static_hadoop_common_GetGroupsForUserResponseProto_descriptor;
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.getDefaultInstance();
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto build() {
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto buildPartial() {
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto result = new org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
groups_.makeImmutable();
result.groups_ = groups_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
if (other instanceof org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto) {
return mergeFrom((org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto other) {
if (other == org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.getDefaultInstance()) return this;
if (!other.groups_.isEmpty()) {
if (groups_.isEmpty()) {
groups_ = other.groups_;
bitField0_ |= 0x00000001;
} else {
ensureGroupsIsMutable();
groups_.addAll(other.groups_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
ensureGroupsIsMutable();
groups_.add(bs);
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList groups_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
private void ensureGroupsIsMutable() {
if (!groups_.isModifiable()) {
groups_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(groups_);
}
bitField0_ |= 0x00000001;
}
/**
* repeated string groups = 1;
* @return A list containing the groups.
*/
public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
getGroupsList() {
groups_.makeImmutable();
return groups_;
}
/**
* repeated string groups = 1;
* @return The count of groups.
*/
public int getGroupsCount() {
return groups_.size();
}
/**
* repeated string groups = 1;
* @param index The index of the element to return.
* @return The groups at the given index.
*/
public java.lang.String getGroups(int index) {
return groups_.get(index);
}
/**
* repeated string groups = 1;
* @param index The index of the value to return.
* @return The bytes of the groups at the given index.
*/
public org.apache.hadoop.thirdparty.protobuf.ByteString
getGroupsBytes(int index) {
return groups_.getByteString(index);
}
/**
* repeated string groups = 1;
* @param index The index to set the value at.
* @param value The groups to set.
* @return This builder for chaining.
*/
public Builder setGroups(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureGroupsIsMutable();
groups_.set(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @param value The groups to add.
* @return This builder for chaining.
*/
public Builder addGroups(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureGroupsIsMutable();
groups_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @param values The groups to add.
* @return This builder for chaining.
*/
public Builder addAllGroups(
java.lang.Iterable<java.lang.String> values) {
ensureGroupsIsMutable();
org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
values, groups_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @return This builder for chaining.
*/
public Builder clearGroups() {
groups_ =
org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);;
onChanged();
return this;
}
/**
* repeated string groups = 1;
* @param value The bytes of the groups to add.
* @return This builder for chaining.
*/
public Builder addGroupsBytes(
org.apache.hadoop.thirdparty.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
ensureGroupsIsMutable();
groups_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:hadoop.common.GetGroupsForUserResponseProto)
}
// @@protoc_insertion_point(class_scope:hadoop.common.GetGroupsForUserResponseProto)
private static final org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto();
}
public static org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserResponseProto>
PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetGroupsForUserResponseProto>() {
@java.lang.Override
public GetGroupsForUserResponseProto parsePartialFrom(
org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserResponseProto> parser() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.thirdparty.protobuf.Parser<GetGroupsForUserResponseProto> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
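// Hedged usage sketch, not part of the generated code: demonstrates reading the
// repeated "groups" field of a GetGroupsForUserResponseProto through the
// generated accessors. The method name "exampleJoinGroups" is an assumption for
// illustration only.
private static java.lang.String exampleJoinGroups(GetGroupsForUserResponseProto response) {
java.lang.StringBuilder joined = new java.lang.StringBuilder();
for (int i = 0; i < response.getGroupsCount(); i++) { // repeated string groups = 1
if (i > 0) {
joined.append(',');
}
joined.append(response.getGroups(i)); // index-based accessor generated above
}
return joined.toString();
}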
/**
*
**
* Protocol which maps users to groups.
*
*
* Protobuf service {@code hadoop.common.GetUserMappingsProtocolService}
*/
public static abstract class GetUserMappingsProtocolService
implements org.apache.hadoop.thirdparty.protobuf.Service {
protected GetUserMappingsProtocolService() {}
public interface Interface {
/**
*
**
* Get the groups which are mapped to the given user.
*
*
* rpc getGroupsForUser(.hadoop.common.GetGroupsForUserRequestProto) returns (.hadoop.common.GetGroupsForUserResponseProto);
*/
public abstract void getGroupsForUser(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto> done);
}
public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
final Interface impl) {
return new GetUserMappingsProtocolService() {
@java.lang.Override
public void getGroupsForUser(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto> done) {
impl.getGroupsForUser(controller, request, done);
}
};
}
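// Hedged sketch, not part of the generated code: a minimal Interface
// implementation that answers every request with a fixed group list, suitable
// for wiring into newReflectiveService(...) above. "ExampleInterfaceImpl" and
// the hard-coded group name are assumptions for illustration only; a real
// server would consult its configured user-to-group mapping.
private static final class ExampleInterfaceImpl implements Interface {
@java.lang.Override
public void getGroupsForUser(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto> done) {
// done.run(...) completes the asynchronous call with the response message.
done.run(org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.newBuilder()
.addGroups("example-group")
.build());
}
}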
public static org.apache.hadoop.thirdparty.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.thirdparty.protobuf.Message request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.getGroupsForUser(controller, (org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
*
**
* Get the groups which are mapped to the given user.
*
*
* rpc getGroupsForUser(.hadoop.common.GetGroupsForUserRequestProto) returns (.hadoop.common.GetGroupsForUserResponseProto);
*/
public abstract void getGroupsForUser(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto> done);
public static final
org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.getDescriptor().getServices().get(0);
}
public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.thirdparty.protobuf.Message request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback<
org.apache.hadoop.thirdparty.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.getGroupsForUser(controller, (org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto)request,
org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto>specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService implements Interface {
private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;
public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
return channel;
}
public void getGroupsForUser(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto request,
org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto> done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.getDefaultInstance(),
org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.class,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto getGroupsForUser(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto getGroupsForUser(
org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto request)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.common.GetUserMappingsProtocolService)
}
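// Hedged usage sketch, not part of the generated code: shows how a client might
// call getGroupsForUser through the blocking stub. Obtaining the
// BlockingRpcChannel (for example from Hadoop's RPC layer) is assumed and out of
// scope here; the method name "exampleGetGroups" and the null controller are
// illustrative assumptions only.
private static java.util.List<java.lang.String> exampleGetGroups(
org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel,
java.lang.String user)
throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
GetUserMappingsProtocolService.BlockingInterface stub =
GetUserMappingsProtocolService.newBlockingStub(channel);
GetGroupsForUserRequestProto request =
GetGroupsForUserRequestProto.newBuilder().setUser(user).build();
GetGroupsForUserResponseProto response = stub.getGroupsForUser(null, request);
return response.getGroupsList(); // ProtocolStringList implements List<String>
}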
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_GetGroupsForUserRequestProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_GetGroupsForUserRequestProto_fieldAccessorTable;
private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
internal_static_hadoop_common_GetGroupsForUserResponseProto_descriptor;
private static final
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hadoop_common_GetGroupsForUserResponseProto_fieldAccessorTable;
public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\035GetUserMappingsProtocol.proto\022\rhadoop." +
"common\",\n\034GetGroupsForUserRequestProto\022\014" +
"\n\004user\030\001 \002(\t\"/\n\035GetGroupsForUserResponse" +
"Proto\022\016\n\006groups\030\001 \003(\t2\217\001\n\036GetUserMapping" +
"sProtocolService\022m\n\020getGroupsForUser\022+.h" +
"adoop.common.GetGroupsForUserRequestProt" +
"o\032,.hadoop.common.GetGroupsForUserRespon" +
"seProtoBD\n\035org.apache.hadoop.tools.proto" +
"B\035GetUserMappingsProtocolProtos\210\001\001\240\001\001"
};
descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
});
internal_static_hadoop_common_GetGroupsForUserRequestProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hadoop_common_GetGroupsForUserRequestProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_GetGroupsForUserRequestProto_descriptor,
new java.lang.String[] { "User", });
internal_static_hadoop_common_GetGroupsForUserResponseProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_hadoop_common_GetGroupsForUserResponseProto_fieldAccessorTable = new
org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hadoop_common_GetGroupsForUserResponseProto_descriptor,
new java.lang.String[] { "Groups", });
}
// @@protoc_insertion_point(outer_class_scope)
}